diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/CodeCoverage.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/CodeCoverage.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/CodeCoverage.cmake 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/CodeCoverage.cmake 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,435 @@ +# Copyright (c) 2012 - 2017, Lars Bilke +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# CHANGES: +# +# 2012-01-31, Lars Bilke +# - Enable Code Coverage +# +# 2013-09-17, Joakim Söderberg +# - Added support for Clang. +# - Some additional usage instructions. +# +# 2016-02-03, Lars Bilke +# - Refactored functions to use named parameters +# +# 2017-06-02, Lars Bilke +# - Merged with modified version from github.com/ufz/ogs +# +# 2019-05-06, Anatolii Kurotych +# - Remove unnecessary --coverage flag +# +# 2019-12-13, FeRD (Frank Dana) +# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor +# of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments. +# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY +# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list +# - Set lcov basedir with -b argument +# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be +# overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().) +# - Delete output dir, .info file on 'make clean' +# - Remove Python detection, since version mismatches will break gcovr +# - Minor cleanup (lowercase function names, update examples...) +# +# 2019-12-19, FeRD (Frank Dana) +# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets +# +# 2020-01-19, Bob Apthorpe +# - Added gfortran support +# +# 2020-02-17, FeRD (Frank Dana) +# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters +# in EXCLUDEs, and remove manual escaping from gcovr targets +# +# USAGE: +# +# 1. Copy this file into your cmake modules path. +# +# 2. Add the following line to your CMakeLists.txt: +# include(CodeCoverage) +# +# 3. 
Append necessary compiler flags: +# append_coverage_compiler_flags() +# +# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og +# +# 4. If you need to exclude additional directories from the report, specify them +# using full paths in the COVERAGE_EXCLUDES variable before calling +# setup_target_for_coverage_*(). +# Example: +# set(COVERAGE_EXCLUDES +# '${PROJECT_SOURCE_DIR}/src/dir1/*' +# '/path/to/my/src/dir2/*') +# Or, use the EXCLUDE argument to setup_target_for_coverage_*(). +# Example: +# setup_target_for_coverage_lcov( +# NAME coverage +# EXECUTABLE testrunner +# EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*") +# +# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set +# relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR) +# Example: +# set(COVERAGE_EXCLUDES "dir1/*") +# setup_target_for_coverage_gcovr_html( +# NAME coverage +# EXECUTABLE testrunner +# BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src" +# EXCLUDE "dir2/*") +# +# 5. Use the functions described below to create a custom make target which +# runs your test executable and produces a code coverage report. +# +# 6. Build a Debug build: +# cmake -DCMAKE_BUILD_TYPE=Debug .. +# make +# make my_coverage_target +# + +include(CMakeParseArguments) + +# Check prereqs +find_program( GCOV_PATH gcov ) +find_program( LCOV_PATH NAMES lcov lcov.bat lcov.exe lcov.perl) +find_program( GENHTML_PATH NAMES genhtml genhtml.perl genhtml.bat ) +find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test) +find_program( CPPFILT_PATH NAMES c++filt ) + +if(NOT GCOV_PATH) + message(FATAL_ERROR "gcov not found! Aborting...") +endif() # NOT GCOV_PATH + +if("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang") + if("${CMAKE_CXX_COMPILER_VERSION}" VERSION_LESS 3) + message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...") + endif() +elseif(NOT CMAKE_COMPILER_IS_GNUCXX) + if("${CMAKE_Fortran_COMPILER_ID}" MATCHES "[Ff]lang") + # Do nothing; exit conditional without error if true + elseif("${CMAKE_Fortran_COMPILER_ID}" MATCHES "GNU") + # Do nothing; exit conditional without error if true + else() + message(FATAL_ERROR "Compiler is not GNU gcc! Aborting...") + endif() +endif() + +set(COVERAGE_COMPILER_FLAGS "-g -fprofile-arcs -ftest-coverage" + CACHE INTERNAL "") + +set(CMAKE_Fortran_FLAGS_COVERAGE + ${COVERAGE_COMPILER_FLAGS} + CACHE STRING "Flags used by the Fortran compiler during coverage builds." + FORCE ) +set(CMAKE_CXX_FLAGS_COVERAGE + ${COVERAGE_COMPILER_FLAGS} + CACHE STRING "Flags used by the C++ compiler during coverage builds." + FORCE ) +set(CMAKE_C_FLAGS_COVERAGE + ${COVERAGE_COMPILER_FLAGS} + CACHE STRING "Flags used by the C compiler during coverage builds." + FORCE ) +set(CMAKE_EXE_LINKER_FLAGS_COVERAGE + "" + CACHE STRING "Flags used for linking binaries during coverage builds." + FORCE ) +set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE + "" + CACHE STRING "Flags used by the shared libraries linker during coverage builds." 
+ FORCE ) +mark_as_advanced( + CMAKE_Fortran_FLAGS_COVERAGE + CMAKE_CXX_FLAGS_COVERAGE + CMAKE_C_FLAGS_COVERAGE + CMAKE_EXE_LINKER_FLAGS_COVERAGE + CMAKE_SHARED_LINKER_FLAGS_COVERAGE ) + +if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug") + message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading") +endif() # NOT CMAKE_BUILD_TYPE STREQUAL "Debug" + +if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") + link_libraries(gcov) +endif() + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. +# +# setup_target_for_coverage_lcov( +# NAME testrunner_coverage # New target name +# EXECUTABLE testrunner -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES testrunner # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative +# # to BASE_DIRECTORY, with CMake 3.4+) +# NO_DEMANGLE # Don't demangle C++ symbols +# # even if c++filt is found +# ) +function(setup_target_for_coverage_lcov) + + set(options NO_DEMANGLE) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES LCOV_ARGS GENHTML_ARGS) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT LCOV_PATH) + message(FATAL_ERROR "lcov not found! Aborting...") + endif() # NOT LCOV_PATH + + if(NOT GENHTML_PATH) + message(FATAL_ERROR "genhtml not found! Aborting...") + endif() # NOT GENHTML_PATH + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(${Coverage_BASE_DIRECTORY}) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (CMake 3.4+: Also compute absolute paths) + set(LCOV_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_LCOV_EXCLUDES}) + if(CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) + endif() + list(APPEND LCOV_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES LCOV_EXCLUDES) + + # Conditional arguments + if(CPPFILT_PATH AND NOT ${Coverage_NO_DEMANGLE}) + set(GENHTML_EXTRA_ARGS "--demangle-cpp") + endif() + + # Setup target + add_custom_target(${Coverage_NAME} + + # Cleanup lcov + COMMAND ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -directory . -b ${BASEDIR} --zerocounters + # Create baseline to make sure untouched files show up in the report + COMMAND ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -c -i -d . -b ${BASEDIR} -o ${Coverage_NAME}.base + + # Run tests + COMMAND ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} + + # Capturing lcov counters and generating report + COMMAND ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} --directory . 
-b ${BASEDIR} --capture --output-file ${Coverage_NAME}.capture + # add baseline counters + COMMAND ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} -a ${Coverage_NAME}.base -a ${Coverage_NAME}.capture --output-file ${Coverage_NAME}.total + # filter collected data to final coverage report + COMMAND ${LCOV_PATH} ${Coverage_LCOV_ARGS} --gcov-tool ${GCOV_PATH} --remove ${Coverage_NAME}.total ${LCOV_EXCLUDES} --output-file ${Coverage_NAME}.info + + # Generate HTML output + COMMAND ${GENHTML_PATH} ${GENHTML_EXTRA_ARGS} ${Coverage_GENHTML_ARGS} -o ${Coverage_NAME} ${Coverage_NAME}.info + + # Set output files as GENERATED (will be removed on 'make clean') + BYPRODUCTS + ${Coverage_NAME}.base + ${Coverage_NAME}.capture + ${Coverage_NAME}.total + ${Coverage_NAME}.info + ${Coverage_NAME} # report directory + + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Resetting code coverage counters to zero.\nProcessing code coverage counters and generating report." + ) + + # Show where to find the lcov info report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Lcov code coverage info report saved in ${Coverage_NAME}.info." + ) + + # Show info where to find the report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Open ./${Coverage_NAME}/index.html in your browser to view the coverage report." + ) + +endfunction() # setup_target_for_coverage_lcov + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. +# +# setup_target_for_coverage_gcovr_xml( +# NAME ctest_coverage # New target name +# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES executable_target # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative +# # to BASE_DIRECTORY, with CMake 3.4+) +# ) +function(setup_target_for_coverage_gcovr_xml) + + set(options NONE) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT GCOVR_PATH) + message(FATAL_ERROR "gcovr not found! 
Aborting...") + endif() # NOT GCOVR_PATH + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(${Coverage_BASE_DIRECTORY}) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (CMake 3.4+: Also compute absolute paths) + set(GCOVR_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) + if(CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) + endif() + list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES GCOVR_EXCLUDES) + + # Combine excludes to several -e arguments + set(GCOVR_EXCLUDE_ARGS "") + foreach(EXCLUDE ${GCOVR_EXCLUDES}) + list(APPEND GCOVR_EXCLUDE_ARGS "-e") + list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}") + endforeach() + + add_custom_target(${Coverage_NAME} + # Run tests + ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} + + # Running gcovr + COMMAND ${GCOVR_PATH} --xml + -r ${BASEDIR} ${GCOVR_EXCLUDE_ARGS} + --object-directory=${PROJECT_BINARY_DIR} + -o ${Coverage_NAME}.xml + BYPRODUCTS ${Coverage_NAME}.xml + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Running gcovr to produce Cobertura code coverage report." + ) + + # Show info where to find the report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Cobertura code coverage report saved in ${Coverage_NAME}.xml." + ) +endfunction() # setup_target_for_coverage_gcovr_xml + +# Defines a target for running and collection code coverage information +# Builds dependencies, runs the given executable and outputs reports. +# NOTE! The executable should always have a ZERO as exit code otherwise +# the coverage generation will not complete. +# +# setup_target_for_coverage_gcovr_html( +# NAME ctest_coverage # New target name +# EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR +# DEPENDENCIES executable_target # Dependencies to build first +# BASE_DIRECTORY "../" # Base directory for report +# # (defaults to PROJECT_SOURCE_DIR) +# EXCLUDE "src/dir1/*" "src/dir2/*" # Patterns to exclude (can be relative +# # to BASE_DIRECTORY, with CMake 3.4+) +# ) +function(setup_target_for_coverage_gcovr_html) + + set(options NONE) + set(oneValueArgs BASE_DIRECTORY NAME) + set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES) + cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + if(NOT GCOVR_PATH) + message(FATAL_ERROR "gcovr not found! 
Aborting...") + endif() # NOT GCOVR_PATH + + # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR + if(${Coverage_BASE_DIRECTORY}) + get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE) + else() + set(BASEDIR ${PROJECT_SOURCE_DIR}) + endif() + + # Collect excludes (CMake 3.4+: Also compute absolute paths) + set(GCOVR_EXCLUDES "") + foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES}) + if(CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR}) + endif() + list(APPEND GCOVR_EXCLUDES "${EXCLUDE}") + endforeach() + list(REMOVE_DUPLICATES GCOVR_EXCLUDES) + + # Combine excludes to several -e arguments + set(GCOVR_EXCLUDE_ARGS "") + foreach(EXCLUDE ${GCOVR_EXCLUDES}) + list(APPEND GCOVR_EXCLUDE_ARGS "-e") + list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}") + endforeach() + + add_custom_target(${Coverage_NAME} + # Run tests + ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS} + + # Create folder + COMMAND ${CMAKE_COMMAND} -E make_directory ${PROJECT_BINARY_DIR}/${Coverage_NAME} + + # Running gcovr + COMMAND ${GCOVR_PATH} --html --html-details + -r ${BASEDIR} ${GCOVR_EXCLUDE_ARGS} + --object-directory=${PROJECT_BINARY_DIR} + -o ${Coverage_NAME}/index.html + + BYPRODUCTS ${PROJECT_BINARY_DIR}/${Coverage_NAME} # report directory + WORKING_DIRECTORY ${PROJECT_BINARY_DIR} + DEPENDS ${Coverage_DEPENDENCIES} + VERBATIM # Protect arguments to commands + COMMENT "Running gcovr to produce HTML code coverage report." + ) + + # Show info where to find the report + add_custom_command(TARGET ${Coverage_NAME} POST_BUILD + COMMAND ; + COMMENT "Open ./${Coverage_NAME}/index.html in your browser to view the coverage report." + ) + +endfunction() # setup_target_for_coverage_gcovr_html + +function(append_coverage_compiler_flags) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE) + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE) + message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}") +endfunction() # append_coverage_compiler_flags diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/FindFFmpeg.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/FindFFmpeg.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/FindFFmpeg.cmake 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/FindFFmpeg.cmake 2020-03-03 08:00:06.000000000 +0000 @@ -1,55 +1,97 @@ # vim: ts=2 sw=2 -# - Try to find the required ffmpeg components(default: AVFORMAT, AVUTIL, AVCODEC) -# -# Once done this will define -# FFMPEG_FOUND - System has the all required components. -# FFMPEG_INCLUDE_DIRS - Include directory necessary for using the required components headers. -# FFMPEG_LIBRARIES - Link these to use the required ffmpeg components. -# FFMPEG_DEFINITIONS - Compiler switches required for using the required ffmpeg components. -# -# For each of the components it will additionally set. 
-# - AVCODEC -# - AVDEVICE -# - AVFORMAT -# - AVFILTER -# - AVUTIL -# - POSTPROC -# - SWSCALE -# - SWRESAMPLE -# - AVRESAMPLE -# the following variables will be defined -# _FOUND - System has -# _INCLUDE_DIRS - Include directory necessary for using the headers -# _LIBRARIES - Link these to use -# _DEFINITIONS - Compiler switches required for using -# _VERSION - The components version -# -# Copyright (c) 2006, Matthias Kretz, -# Copyright (c) 2008, Alexander Neundorf, -# Copyright (c) 2011, Michael Jansen, -# -# Redistribution and use is allowed according to the terms of the BSD license. -# For details see the accompanying COPYING-CMAKE-SCRIPTS file. +#[=======================================================================[.rst: +FindFFmpeg +---------- +Try to find the requested ffmpeg components(default: avformat, avutil, avcodec) + +IMPORTED targets +^^^^^^^^^^^^^^^^ + +This module defines :prop_tgt:`IMPORTED` targets ``FFmpeg:`` for +each found component (see below). + +Components +^^^^^^^^^^ + +The module recognizes the following components: + +:: + + avcodec - target FFmpeg::avcodec + avdevice - target FFmpeg::avdevice + avformat - target FFmpeg::avformat + avfilter - target FFmpeg::avfilter + avutil - target FFmpeg::avutil + postproc - target FFmpeg::postproc + swscale - target FFmpeg::swscale + swresample - target FFmpeg::swresample + avresample - target FFmpeg::avresample + +Result Variables +^^^^^^^^^^^^^^^^ + +This module defines the following variables: + +:: + + FFMPEG_FOUND - System has the all required components. + FFMPEG_INCLUDE_DIRS - Include directory necessary for using the required components headers. + FFMPEG_LIBRARIES - Link these to use the required ffmpeg components. + FFMPEG_DEFINITIONS - Compiler switches required for using the required ffmpeg components. + +For each component, ``_FOUND`` will be set if the component is available. + +For each ``_FOUND``, the following variables will be defined: + +:: + + _INCLUDE_DIRS - Include directory necessary for using the headers + _LIBRARIES - Link these to use + _DEFINITIONS - Compiler switches required for using + _VERSION - The components version + +Backwards compatibility +^^^^^^^^^^^^^^^^^^^^^^^ + +For compatibility with previous versions of this module, uppercase names +for FFmpeg and for all components are also recognized, and all-uppercase +versions of the cache variables are also created. +Copyright (c) 2006, Matthias Kretz, +Copyright (c) 2008, Alexander Neundorf, +Copyright (c) 2011, Michael Jansen, +Copyright (c) 2019, FeRD (Frank Dana) + +Redistribution and use is allowed according to the terms of the BSD license. +For details see the accompanying COPYING-CMAKE-SCRIPTS file. +#]=======================================================================] include(FindPackageHandleStandardArgs) -# The default components were taken from a survey over other FindFFMPEG.cmake files +set(FFmpeg_ALL_COMPONENTS avcodec avdevice avformat avfilter avutil postproc swscale swresample avresample) + +# Default to all components, if not specified +if (FFMPEG_FIND_COMPONENTS AND NOT FFmpeg_FIND_COMPONENTS) + set(FFmpeg_FIND_COMPONENTS ${FFMPEG_FIND_COMPONENTS}) +endif () if (NOT FFmpeg_FIND_COMPONENTS) - set(FFmpeg_FIND_COMPONENTS AVCODEC AVFORMAT AVUTIL) + set(FFmpeg_FIND_COMPONENTS ${FFmpeg_ALL_COMPONENTS}) endif () + # ### Macro: set_component_found # # Marks the given component as found if both *_LIBRARIES AND *_INCLUDE_DIRS is present. 
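As a consumer-side sketch of the imported targets documented above (the media_demo target name is hypothetical, and each FFmpeg::<component> target is only created for components the module actually finds):

    find_package(FFmpeg REQUIRED COMPONENTS avcodec avformat avutil swscale)
    add_executable(media_demo main.cpp)
    target_link_libraries(media_demo PRIVATE
      FFmpeg::avcodec FFmpeg::avformat FFmpeg::avutil FFmpeg::swscale)
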
# macro(set_component_found _component ) - if (${_component}_LIBRARIES AND ${_component}_INCLUDE_DIRS) - # message(STATUS " - ${_component} found.") - set(${_component}_FOUND TRUE) - else () - # message(STATUS " - ${_component} not found.") - endif () + if (${_component}_LIBRARIES AND ${_component}_INCLUDE_DIRS) + # message(STATUS "FFmpeg - ${_component} found.") + set(${_component}_FOUND TRUE) + else () + if (NOT FFmpeg_FIND_QUIETLY AND NOT FFMPEG_FIND_QUIETLY) + message(STATUS "FFmpeg - ${_component} not found.") + endif () + endif () endmacro() # @@ -60,102 +102,153 @@ # macro(find_component _component _pkgconfig _library _header) - if (NOT WIN32) - # use pkg-config to get the directories and then use these values - # in the FIND_PATH() and FIND_LIBRARY() calls - find_package(PkgConfig) - if (PKG_CONFIG_FOUND) - pkg_check_modules(PC_${_component} ${_pkgconfig}) - endif () - endif (NOT WIN32) - - find_path(${_component}_INCLUDE_DIRS ${_header} - HINTS - /opt/ - /opt/include/ - ${PC_LIB${_component}_INCLUDEDIR} - ${PC_LIB${_component}_INCLUDE_DIRS} - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ - PATH_SUFFIXES - ffmpeg - ) - - find_library(${_component}_LIBRARIES NAMES ${_library} - HINTS - ${PC_LIB${_component}_LIBDIR} - ${PC_LIB${_component}_LIBRARY_DIRS} - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ - ) - - set(${_component}_DEFINITIONS ${PC_${_component}_CFLAGS_OTHER} CACHE STRING "The ${_component} CFLAGS.") - set(${_component}_VERSION ${PC_${_component}_VERSION} CACHE STRING "The ${_component} version number.") - - set_component_found(${_component}) - - mark_as_advanced( - ${_component}_INCLUDE_DIRS - ${_component}_LIBRARIES - ${_component}_DEFINITIONS - ${_component}_VERSION) + if (NOT WIN32) + # use pkg-config to get the directories and then use these values + # in the FIND_PATH() and FIND_LIBRARY() calls + find_package(PkgConfig) + if (PKG_CONFIG_FOUND) + pkg_check_modules(PC_${_component} ${_pkgconfig}) + endif () + endif (NOT WIN32) + + find_path(${_component}_INCLUDE_DIRS ${_header} + HINTS + /opt/ + /opt/include/ + ${PC_${_component}_INCLUDEDIR} + ${PC_${_component}_INCLUDE_DIRS} + $ENV{FFMPEGDIR}/include/ + $ENV{FFMPEGDIR}/include/ffmpeg/ + PATH_SUFFIXES + ffmpeg + ) + + find_library(${_component}_LIBRARIES NAMES ${_library} + HINTS + ${PC_${_component}_LIBDIR} + ${PC_${_component}_LIBRARY_DIRS} + $ENV{FFMPEGDIR}/lib/ + $ENV{FFMPEGDIR}/lib/ffmpeg/ + $ENV{FFMPEGDIR}/bin/ + ) + + set(${_component}_DEFINITIONS ${PC_${_component}_CFLAGS_OTHER} CACHE STRING "The ${_component} CFLAGS.") + set(${_component}_VERSION ${PC_${_component}_VERSION} CACHE STRING "The ${_component} version number.") + + set_component_found(${_component}) + + mark_as_advanced( + ${_component}_INCLUDE_DIRS + ${_component}_LIBRARIES + ${_component}_DEFINITIONS + ${_component}_VERSION + ) endmacro() # Check for cached results. If there are skip the costly part. -if (NOT FFMPEG_LIBRARIES) +if (NOT FFmpeg_LIBRARIES) - # Check for all possible component. 
- find_component(AVCODEC libavcodec avcodec libavcodec/avcodec.h) - find_component(AVFORMAT libavformat avformat libavformat/avformat.h) - find_component(AVDEVICE libavdevice avdevice libavdevice/avdevice.h) - find_component(AVUTIL libavutil avutil libavutil/avutil.h) - find_component(AVFILTER libavfilter avfilter libavfilter/avfilter.h) - find_component(SWSCALE libswscale swscale libswscale/swscale.h) - find_component(POSTPROC libpostproc postproc libpostproc/postprocess.h) - find_component(SWRESAMPLE libswresample swresample libswresample/swresample.h) - find_component(AVRESAMPLE libavresample avresample libavresample/avresample.h) - - # Check if the required components were found and add their stuff to the FFMPEG_* vars. - foreach (_component ${FFmpeg_FIND_COMPONENTS}) - if (${_component}_FOUND) - # message(STATUS "Required component ${_component} present.") - set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} ${${_component}_LIBRARIES}) - set(FFMPEG_DEFINITIONS ${FFMPEG_DEFINITIONS} ${${_component}_DEFINITIONS}) - list(APPEND FFMPEG_INCLUDE_DIRS ${${_component}_INCLUDE_DIRS}) - else () - # message(STATUS "Required component ${_component} missing.") - endif () - endforeach () - - # Build the include path with duplicates removed. - if (FFMPEG_INCLUDE_DIRS) - list(REMOVE_DUPLICATES FFMPEG_INCLUDE_DIRS) - endif () - - # cache the vars. - set(FFMPEG_INCLUDE_DIRS ${FFMPEG_INCLUDE_DIRS} CACHE STRING "The FFmpeg include directories." FORCE) - set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} CACHE STRING "The FFmpeg libraries." FORCE) - set(FFMPEG_DEFINITIONS ${FFMPEG_DEFINITIONS} CACHE STRING "The FFmpeg cflags." FORCE) - - mark_as_advanced(FFMPEG_INCLUDE_DIRS - FFMPEG_LIBRARIES - FFMPEG_DEFINITIONS) + # Check for all possible component. + find_component(avcodec libavcodec avcodec libavcodec/avcodec.h) + find_component(avdevice libavdevice avdevice libavdevice/avdevice.h) + find_component(avformat libavformat avformat libavformat/avformat.h) + find_component(avfilter libavfilter avfilter libavfilter/avfilter.h) + find_component(avutil libavutil avutil libavutil/avutil.h) + find_component(postproc libpostproc postproc libpostproc/postprocess.h) + find_component(swscale libswscale swscale libswscale/swscale.h) + find_component(swresample libswresample swresample libswresample/swresample.h) + find_component(avresample libavresample avresample libavresample/avresample.h) +else() + # Just set the noncached _FOUND vars for the components. + foreach(_component ${FFmpeg_ALL_COMPONENTS}) + set_component_found(${_component}) + endforeach () +endif() +# Check if the requested components were found and add their stuff to the FFmpeg_* vars. +foreach (_component ${FFmpeg_FIND_COMPONENTS}) + string(TOLOWER "${_component}" _component) + if (${_component}_FOUND) + # message(STATUS "Requested component ${_component} present.") + set(FFmpeg_LIBRARIES ${FFmpeg_LIBRARIES} ${${_component}_LIBRARIES}) + set(FFmpeg_DEFINITIONS ${FFmpeg_DEFINITIONS} ${${_component}_DEFINITIONS}) + list(APPEND FFmpeg_INCLUDE_DIRS ${${_component}_INCLUDE_DIRS}) + else () + # message(STATUS "Requested component ${_component} missing.") + endif () +endforeach () + +# Build the result lists with duplicates removed, in case of repeated +# invocations. +if (FFmpeg_INCLUDE_DIRS) + list(REMOVE_DUPLICATES FFmpeg_INCLUDE_DIRS) +endif() +if (FFmpeg_LIBRARIES) + list(REMOVE_DUPLICATES FFmpeg_LIBRARIES) +endif() +if(FFmpeg_DEFINITIONS) + list(REMOVE_DUPLICATES FFmpeg_DEFINITIONS) endif () -# Now set the noncached _FOUND vars for the components. 
-foreach (_component AVCODEC AVDEVICE AVFORMAT AVUTIL POSTPROCESS SWSCALE SWRESAMPLE AVRESAMPLE) - set_component_found(${_component}) -endforeach () +# cache the vars. +set(FFmpeg_INCLUDE_DIRS ${FFmpeg_INCLUDE_DIRS} CACHE STRING "The FFmpeg include directories." FORCE) +set(FFmpeg_LIBRARIES ${FFmpeg_LIBRARIES} CACHE STRING "The FFmpeg libraries." FORCE) +set(FFmpeg_DEFINITIONS ${FFmpeg_DEFINITIONS} CACHE STRING "The FFmpeg cflags." FORCE) + +mark_as_advanced(FFmpeg_INCLUDE_DIRS + FFmpeg_LIBRARIES + FFmpeg_DEFINITIONS) + +# Backwards compatibility +foreach(_suffix INCLUDE_DIRS LIBRARIES DEFINITIONS) + get_property(_help CACHE FFmpeg_${_suffix} PROPERTY HELPSTRING) + set(FFMPEG_${_suffix} ${FFmpeg_${_suffix}} CACHE STRING "${_help}" FORCE) + mark_as_advanced(FFMPEG_${_suffix}) +endforeach() +foreach(_component ${FFmpeg_ALL_COMPONENTS}) + if(${_component}_FOUND) + string(TOUPPER "${_component}" _uc_component) + set(${_uc_component}_FOUND TRUE) + foreach(_suffix INCLUDE_DIRS LIBRARIES DEFINITIONS VERSION) + get_property(_help CACHE ${_component}_${_suffix} PROPERTY HELPSTRING) + set(${_uc_component}_${_suffix} ${${_component}_${_suffix}} CACHE STRING "${_help}" FORCE) + mark_as_advanced(${_uc_component}_${_suffix}) + endforeach() + endif() +endforeach() # Compile the list of required vars -set(_FFmpeg_REQUIRED_VARS FFMPEG_LIBRARIES FFMPEG_INCLUDE_DIRS) +set(_FFmpeg_REQUIRED_VARS FFmpeg_LIBRARIES FFmpeg_INCLUDE_DIRS) foreach (_component ${FFmpeg_FIND_COMPONENTS}) - list(APPEND _FFmpeg_REQUIRED_VARS ${_component}_LIBRARIES ${_component}_INCLUDE_DIRS) + list(APPEND _FFmpeg_REQUIRED_VARS + ${_component}_LIBRARIES + ${_component}_INCLUDE_DIRS) endforeach () # Give a nice error message if some of the required vars are missing. -find_package_handle_standard_args(FFmpeg DEFAULT_MSG ${_FFmpeg_REQUIRED_VARS}) \ No newline at end of file +find_package_handle_standard_args(FFmpeg DEFAULT_MSG ${_FFmpeg_REQUIRED_VARS}) + +# Export targets for each found component +foreach (_component ${FFmpeg_ALL_COMPONENTS}) + + if(${_component}_FOUND) + # message(STATUS "Creating IMPORTED target FFmpeg::${_component}") + + if(NOT TARGET FFmpeg::${_component}) + add_library(FFmpeg::${_component} UNKNOWN IMPORTED) + + set_target_properties(FFmpeg::${_component} PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${${_component}_INCLUDE_DIRS}") + + set_property(TARGET FFmpeg::${_component} APPEND PROPERTY + INTERFACE_COMPILE_DEFINITIONS "${${_component}_DEFINITIONS}") + + set_property(TARGET FFmpeg::${_component} APPEND PROPERTY + IMPORTED_LOCATION "${${_component}_LIBRARIES}") + endif() + + endif() + +endforeach() diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/FindOpenShotAudio.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/FindOpenShotAudio.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/FindOpenShotAudio.cmake 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/FindOpenShotAudio.cmake 2020-03-03 08:00:06.000000000 +0000 @@ -5,55 +5,112 @@ # LIBOPENSHOT_AUDIO_INCLUDE_DIRS - The juce.h include directories # LIBOPENSHOT_AUDIO_LIBRARIES - The libraries needed to use juce -message("$ENV{LIBOPENSHOT_AUDIO_DIR}") +if("$ENV{LIBOPENSHOT_AUDIO_DIR}" AND NOT "${OpenShotAudio_FIND_QUIETLY}") + message(STATUS "Looking for OpenShotAudio in: $ENV{LIBOPENSHOT_AUDIO_DIR}") +endif() + +# Find the libopenshot-audio header files (check env/cache vars first) +find_path( + LIBOPENSHOT_AUDIO_INCLUDE_DIR + JuceHeader.h + HINTS + ENV LIBOPENSHOT_AUDIO_DIR + PATHS + ${LIBOPENSHOT_AUDIO_DIR} + PATH_SUFFIXES + 
include/libopenshot-audio + libopenshot-audio + include + NO_DEFAULT_PATH +) + +# Find the libopenshot-audio header files (fallback to std. paths) +find_path( + LIBOPENSHOT_AUDIO_INCLUDE_DIR + JuceHeader.h + HINTS + ENV LIBOPENSHOT_AUDIO_DIR + PATHS + ${LIBOPENSHOT_AUDIO_DIR} + PATH_SUFFIXES + include/libopenshot-audio + libopenshot-audio + include +) -# Find the base directory of juce includes -find_path(LIBOPENSHOT_AUDIO_BASE_DIR JuceHeader.h - PATHS $ENV{LIBOPENSHOT_AUDIO_DIR}/include/libopenshot-audio/ - /usr/include/libopenshot-audio/ - /usr/local/include/libopenshot-audio/ ) - -# Get a list of all header file paths -FILE(GLOB_RECURSE JUCE_HEADER_FILES - ${LIBOPENSHOT_AUDIO_BASE_DIR}/*.h +# Find libopenshot-audio.so / libopenshot-audio.dll (check env/cache vars first) +find_library( + LIBOPENSHOT_AUDIO_LIBRARY + NAMES + libopenshot-audio + openshot-audio + HINTS + ENV LIBOPENSHOT_AUDIO_DIR + PATHS + ${LIBOPENSHOT_AUDIO_DIR} + PATH_SUFFIXES + lib/libopenshot-audio + libopenshot-audio + lib + NO_DEFAULT_PATH ) -# Loop through each header file -FOREACH(HEADER_PATH ${JUCE_HEADER_FILES}) - # Get the directory of each header file - get_filename_component(HEADER_DIRECTORY ${HEADER_PATH} - PATH - ) - - # Append each directory into the HEADER_DIRECTORIES list - LIST(APPEND HEADER_DIRECTORIES ${HEADER_DIRECTORY}) -ENDFOREACH(HEADER_PATH) - -# Remove duplicates from the header directories list -LIST(REMOVE_DUPLICATES HEADER_DIRECTORIES) - -# Find the libopenshot-audio.so (check env var first) -find_library(LIBOPENSHOT_AUDIO_LIBRARY - NAMES libopenshot-audio openshot-audio - PATHS $ENV{LIBOPENSHOT_AUDIO_DIR}/lib/ NO_DEFAULT_PATH) - -# Find the libopenshot-audio.so / libopenshot-audio.dll library (fallback) -find_library(LIBOPENSHOT_AUDIO_LIBRARY - NAMES libopenshot-audio openshot-audio - HINTS $ENV{LIBOPENSHOT_AUDIO_DIR}/lib/ - /usr/lib/ - /usr/lib/libopenshot-audio/ - /usr/local/lib/ ) - -set(LIBOPENSHOT_AUDIO_LIBRARIES ${LIBOPENSHOT_AUDIO_LIBRARY}) -set(LIBOPENSHOT_AUDIO_LIBRARY ${LIBOPENSHOT_AUDIO_LIBRARIES}) - -# Seems to work fine with just the base dir (rather than all the actual include folders) -set(LIBOPENSHOT_AUDIO_INCLUDE_DIR ${LIBOPENSHOT_AUDIO_BASE_DIR} ) -set(LIBOPENSHOT_AUDIO_INCLUDE_DIRS ${LIBOPENSHOT_AUDIO_BASE_DIR} ) +# Find libopenshot-audio.so / libopenshot-audio.dll (fallback) +find_library( + LIBOPENSHOT_AUDIO_LIBRARY + NAMES + libopenshot-audio + openshot-audio + HINTS + ENV LIBOPENSHOT_AUDIO_DIR + PATHS + ${LIBOPENSHOT_AUDIO_DIR} + PATH_SUFFIXES + lib/libopenshot-audio + libopenshot-audio + lib +) + +set(LIBOPENSHOT_AUDIO_LIBRARIES "${LIBOPENSHOT_AUDIO_LIBRARY}") +set(LIBOPENSHOT_AUDIO_LIBRARY "${LIBOPENSHOT_AUDIO_LIBRARIES}") +set(LIBOPENSHOT_AUDIO_INCLUDE_DIRS "${LIBOPENSHOT_AUDIO_INCLUDE_DIR}") + +if(LIBOPENSHOT_AUDIO_INCLUDE_DIR AND EXISTS "${LIBOPENSHOT_AUDIO_INCLUDE_DIR}/JuceHeader.h") + file(STRINGS "${LIBOPENSHOT_AUDIO_INCLUDE_DIR}/JuceHeader.h" libosa_version_str + REGEX "versionString.*=.*\"[^\"]+\"") + if(libosa_version_str MATCHES "versionString.*=.*\"([^\"]+)\"") + set(LIBOPENSHOT_AUDIO_VERSION_STRING ${CMAKE_MATCH_1}) + endif() + unset(libosa_version_str) + string(REGEX REPLACE "^([0-9]+\.[0-9]+\.[0-9]+).*$" "\\1" + LIBOPENSHOT_AUDIO_VERSION "${LIBOPENSHOT_AUDIO_VERSION_STRING}") +endif() + +# If we couldn't parse M.N.B version, don't keep any of it +if(NOT LIBOPENSHOT_AUDIO_VERSION) + unset(LIBOPENSHOT_AUDIO_VERSION) + unset(LIBOPENSHOT_AUDIO_VERSION_STRING) +endif() + +# Determine compatibility with requested version in find_package() 
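A minimal sketch of how a project might call the reworked module once the version parsing below is in place; the 0.1.9 minimum and the my_app target are hypothetical placeholders, while the variable names come from the module's own header comment:

    find_package(OpenShotAudio 0.1.9 REQUIRED)
    target_include_directories(my_app PRIVATE ${LIBOPENSHOT_AUDIO_INCLUDE_DIRS})
    target_link_libraries(my_app PRIVATE ${LIBOPENSHOT_AUDIO_LIBRARIES})
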
+if(OpenShotAudio_FIND_VERSION AND LIBOPENSHOT_AUDIO_VERSION) + if("${OpenShotAudio_FIND_VERSION}" STREQUAL "${LIBOPENSHOT_AUDIO_VERSION}") + set(OpenShotAudio_VERSION_EXACT TRUE) + endif() + if("${OpenShotAudio_FIND_VERSION}" VERSION_GREATER "${LIBOPENSHOT_AUDIO_VERSION}") + set(OpenShotAudio_VERSION_COMPATIBLE FALSE) + else() + set(OpenShotAudio_VERSION_COMPATIBLE TRUE) + endif() +endif() include(FindPackageHandleStandardArgs) # handle the QUIETLY and REQUIRED arguments and set LIBOPENSHOT_AUDIO_FOUND to TRUE # if all listed variables are TRUE -find_package_handle_standard_args(LIBOPENSHOT_AUDIO DEFAULT_MSG - LIBOPENSHOT_AUDIO_LIBRARY LIBOPENSHOT_AUDIO_INCLUDE_DIR) +find_package_handle_standard_args(OpenShotAudio + REQUIRED_VARS + LIBOPENSHOT_AUDIO_LIBRARY + LIBOPENSHOT_AUDIO_INCLUDE_DIRS + VERSION_VAR + LIBOPENSHOT_AUDIO_VERSION_STRING +) diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/FindPythonLibs.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/FindPythonLibs.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/FindPythonLibs.cmake 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/FindPythonLibs.cmake 1970-01-01 00:00:00.000000000 +0000 @@ -1,294 +0,0 @@ -#.rst: -# FindPythonLibs -# -------------- -# -# Find python libraries -# -# This module finds if Python is installed and determines where the -# include files and libraries are. It also determines what the name of -# the library is. This code sets the following variables: -# -# :: -# -# PYTHONLIBS_FOUND - have the Python libs been found -# PYTHON_LIBRARIES - path to the python library -# PYTHON_INCLUDE_PATH - path to where Python.h is found (deprecated) -# PYTHON_INCLUDE_DIRS - path to where Python.h is found -# PYTHON_DEBUG_LIBRARIES - path to the debug library (deprecated) -# PYTHONLIBS_VERSION_STRING - version of the Python libs found (since CMake 2.8.8) -# -# -# -# The Python_ADDITIONAL_VERSIONS variable can be used to specify a list -# of version numbers that should be taken into account when searching -# for Python. You need to set this variable before calling -# find_package(PythonLibs). -# -# If you'd like to specify the installation of Python to use, you should -# modify the following cache variables: -# -# :: -# -# PYTHON_LIBRARY - path to the python library -# PYTHON_INCLUDE_DIR - path to where Python.h is found -# -# If also calling find_package(PythonInterp), call find_package(PythonInterp) -# first to get the currently active Python version by default with a consistent -# version of PYTHON_LIBRARIES. - -#============================================================================= -# Copyright 2001-2009 Kitware, Inc. -# -# Distributed under the OSI-approved BSD License (the "License"); -# see accompanying file Copyright.txt for details. -# -# This software is distributed WITHOUT ANY WARRANTY; without even the -# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -# See the License for more information. -#============================================================================= -# (To distribute this file outside of CMake, substitute the full -# License text for the above reference.) - -include(CMakeFindFrameworks) -# Search for the python framework on Apple. 
-CMAKE_FIND_FRAMEWORKS(Python) - -set(_PYTHON1_VERSIONS 1.6 1.5) -set(_PYTHON2_VERSIONS 2.7 2.6 2.5 2.4 2.3 2.2 2.1 2.0) -set(_PYTHON3_VERSIONS 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0) - -if(PythonLibs_FIND_VERSION) - if(PythonLibs_FIND_VERSION_COUNT GREATER 1) - set(_PYTHON_FIND_MAJ_MIN "${PythonLibs_FIND_VERSION_MAJOR}.${PythonLibs_FIND_VERSION_MINOR}") - unset(_PYTHON_FIND_OTHER_VERSIONS) - if(PythonLibs_FIND_VERSION_EXACT) - if(_PYTHON_FIND_MAJ_MIN STREQUAL PythonLibs_FIND_VERSION) - set(_PYTHON_FIND_OTHER_VERSIONS "${PythonLibs_FIND_VERSION}") - else() - set(_PYTHON_FIND_OTHER_VERSIONS "${PythonLibs_FIND_VERSION}" "${_PYTHON_FIND_MAJ_MIN}") - endif() - else() - foreach(_PYTHON_V ${_PYTHON${PythonLibs_FIND_VERSION_MAJOR}_VERSIONS}) - if(NOT _PYTHON_V VERSION_LESS _PYTHON_FIND_MAJ_MIN) - list(APPEND _PYTHON_FIND_OTHER_VERSIONS ${_PYTHON_V}) - endif() - endforeach() - endif() - unset(_PYTHON_FIND_MAJ_MIN) - else() - set(_PYTHON_FIND_OTHER_VERSIONS ${_PYTHON${PythonLibs_FIND_VERSION_MAJOR}_VERSIONS}) - endif() -else() - set(_PYTHON_FIND_OTHER_VERSIONS ${_PYTHON3_VERSIONS} ${_PYTHON2_VERSIONS} ${_PYTHON1_VERSIONS}) -endif() - -# Set up the versions we know about, in the order we will search. Always add -# the user supplied additional versions to the front. -# If FindPythonInterp has already found the major and minor version, -# insert that version between the user supplied versions and the stock -# version list. -set(_Python_VERSIONS ${Python_ADDITIONAL_VERSIONS}) -if(DEFINED PYTHON_VERSION_MAJOR AND DEFINED PYTHON_VERSION_MINOR) - list(APPEND _Python_VERSIONS ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}) -endif() -list(APPEND _Python_VERSIONS ${_PYTHON_FIND_OTHER_VERSIONS}) - -unset(_PYTHON_FIND_OTHER_VERSIONS) -unset(_PYTHON1_VERSIONS) -unset(_PYTHON2_VERSIONS) -unset(_PYTHON3_VERSIONS) - -foreach(_CURRENT_VERSION ${_Python_VERSIONS}) - string(REPLACE "." "" _CURRENT_VERSION_NO_DOTS ${_CURRENT_VERSION}) - if(WIN32) - find_library(PYTHON_DEBUG_LIBRARY - NAMES python${_CURRENT_VERSION_NO_DOTS}_d python - PATHS - [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs/Debug - [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs/Debug - [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs - [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs - ) - endif() - - find_library(PYTHON_LIBRARY - NAMES - python${_CURRENT_VERSION_NO_DOTS} - python${_CURRENT_VERSION}mu - python${_CURRENT_VERSION}m - python${_CURRENT_VERSION}u - python${_CURRENT_VERSION} - PATHS - [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs - [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/libs - # Avoid finding the .dll in the PATH. We want the .lib. - NO_SYSTEM_ENVIRONMENT_PATH - ) - # Look for the static library in the Python config directory - find_library(PYTHON_LIBRARY - NAMES python${_CURRENT_VERSION_NO_DOTS} python${_CURRENT_VERSION} - # Avoid finding the .dll in the PATH. We want the .lib. - NO_SYSTEM_ENVIRONMENT_PATH - # This is where the static library is usually located - PATH_SUFFIXES python${_CURRENT_VERSION}/config - ) - - # For backward compatibility, honour value of PYTHON_INCLUDE_PATH, if - # PYTHON_INCLUDE_DIR is not set. 
- if(DEFINED PYTHON_INCLUDE_PATH AND NOT DEFINED PYTHON_INCLUDE_DIR) - set(PYTHON_INCLUDE_DIR "${PYTHON_INCLUDE_PATH}" CACHE PATH - "Path to where Python.h is found" FORCE) - endif() - - set(PYTHON_FRAMEWORK_INCLUDES) - if(Python_FRAMEWORKS AND NOT PYTHON_INCLUDE_DIR) - foreach(dir ${Python_FRAMEWORKS}) - set(PYTHON_FRAMEWORK_INCLUDES ${PYTHON_FRAMEWORK_INCLUDES} - ${dir}/Versions/${_CURRENT_VERSION}/include/python${_CURRENT_VERSION}) - endforeach() - endif() - - find_path(PYTHON_INCLUDE_DIR - NAMES Python.h - PATHS - ${PYTHON_FRAMEWORK_INCLUDES} - [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/include - [HKEY_CURRENT_USER\\SOFTWARE\\Python\\PythonCore\\${_CURRENT_VERSION}\\InstallPath]/include - PATH_SUFFIXES - python${_CURRENT_VERSION}mu - python${_CURRENT_VERSION}m - python${_CURRENT_VERSION}u - python${_CURRENT_VERSION} - ) - - # For backward compatibility, set PYTHON_INCLUDE_PATH. - set(PYTHON_INCLUDE_PATH "${PYTHON_INCLUDE_DIR}") - - if(PYTHON_INCLUDE_DIR AND EXISTS "${PYTHON_INCLUDE_DIR}/patchlevel.h") - file(STRINGS "${PYTHON_INCLUDE_DIR}/patchlevel.h" python_version_str - REGEX "^#define[ \t]+PY_VERSION[ \t]+\"[^\"]+\"") - string(REGEX REPLACE "^#define[ \t]+PY_VERSION[ \t]+\"([^\"]+)\".*" "\\1" - PYTHONLIBS_VERSION_STRING "${python_version_str}") - unset(python_version_str) - endif() - - if(PYTHON_LIBRARY AND PYTHON_INCLUDE_DIR) - break() - endif() -endforeach() - -mark_as_advanced( - PYTHON_DEBUG_LIBRARY - PYTHON_LIBRARY - PYTHON_INCLUDE_DIR -) - -# We use PYTHON_INCLUDE_DIR, PYTHON_LIBRARY and PYTHON_DEBUG_LIBRARY for the -# cache entries because they are meant to specify the location of a single -# library. We now set the variables listed by the documentation for this -# module. -set(PYTHON_INCLUDE_DIRS "${PYTHON_INCLUDE_DIR}") -set(PYTHON_DEBUG_LIBRARIES "${PYTHON_DEBUG_LIBRARY}") - -# These variables have been historically named in this module different from -# what SELECT_LIBRARY_CONFIGURATIONS() expects. -set(PYTHON_LIBRARY_DEBUG "${PYTHON_DEBUG_LIBRARY}") -set(PYTHON_LIBRARY_RELEASE "${PYTHON_LIBRARY}") -include(SelectLibraryConfigurations) -SELECT_LIBRARY_CONFIGURATIONS(PYTHON) -# SELECT_LIBRARY_CONFIGURATIONS() sets ${PREFIX}_FOUND if it has a library. -# Unset this, this prefix doesn't match the module prefix, they are different -# for historical reasons. -unset(PYTHON_FOUND) - -include(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(PythonLibs - REQUIRED_VARS PYTHON_LIBRARIES PYTHON_INCLUDE_DIRS - VERSION_VAR PYTHONLIBS_VERSION_STRING) - -# PYTHON_ADD_MODULE( src1 src2 ... srcN) is used to build modules for python. 
-# PYTHON_WRITE_MODULES_HEADER() writes a header file you can include -# in your sources to initialize the static python modules -function(PYTHON_ADD_MODULE _NAME ) - get_property(_TARGET_SUPPORTS_SHARED_LIBS - GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS) - option(PYTHON_ENABLE_MODULE_${_NAME} "Add module ${_NAME}" TRUE) - option(PYTHON_MODULE_${_NAME}_BUILD_SHARED - "Add module ${_NAME} shared" ${_TARGET_SUPPORTS_SHARED_LIBS}) - - # Mark these options as advanced - mark_as_advanced(PYTHON_ENABLE_MODULE_${_NAME} - PYTHON_MODULE_${_NAME}_BUILD_SHARED) - - if(PYTHON_ENABLE_MODULE_${_NAME}) - if(PYTHON_MODULE_${_NAME}_BUILD_SHARED) - set(PY_MODULE_TYPE MODULE) - else() - set(PY_MODULE_TYPE STATIC) - set_property(GLOBAL APPEND PROPERTY PY_STATIC_MODULES_LIST ${_NAME}) - endif() - - set_property(GLOBAL APPEND PROPERTY PY_MODULES_LIST ${_NAME}) - add_library(${_NAME} ${PY_MODULE_TYPE} ${ARGN}) -# target_link_libraries(${_NAME} ${PYTHON_LIBRARIES}) - - if(PYTHON_MODULE_${_NAME}_BUILD_SHARED) - set_target_properties(${_NAME} PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}") - if(WIN32 AND NOT CYGWIN) - set_target_properties(${_NAME} PROPERTIES SUFFIX ".pyd") - endif() - endif() - - endif() -endfunction() - -function(PYTHON_WRITE_MODULES_HEADER _filename) - - get_property(PY_STATIC_MODULES_LIST GLOBAL PROPERTY PY_STATIC_MODULES_LIST) - - get_filename_component(_name "${_filename}" NAME) - string(REPLACE "." "_" _name "${_name}") - string(TOUPPER ${_name} _nameUpper) - set(_filename ${CMAKE_CURRENT_BINARY_DIR}/${_filename}) - - set(_filenameTmp "${_filename}.in") - file(WRITE ${_filenameTmp} "/*Created by cmake, do not edit, changes will be lost*/\n") - file(APPEND ${_filenameTmp} -"#ifndef ${_nameUpper} -#define ${_nameUpper} - -#include - -#ifdef __cplusplus -extern \"C\" { -#endif /* __cplusplus */ - -") - - foreach(_currentModule ${PY_STATIC_MODULES_LIST}) - file(APPEND ${_filenameTmp} "extern void init${PYTHON_MODULE_PREFIX}${_currentModule}(void);\n\n") - endforeach() - - file(APPEND ${_filenameTmp} -"#ifdef __cplusplus -} -#endif /* __cplusplus */ - -") - - - foreach(_currentModule ${PY_STATIC_MODULES_LIST}) - file(APPEND ${_filenameTmp} "int ${_name}_${_currentModule}(void) \n{\n static char name[]=\"${PYTHON_MODULE_PREFIX}${_currentModule}\"; return PyImport_AppendInittab(name, init${PYTHON_MODULE_PREFIX}${_currentModule});\n}\n\n") - endforeach() - - file(APPEND ${_filenameTmp} "void ${_name}_LoadAllPythonModules(void)\n{\n") - foreach(_currentModule ${PY_STATIC_MODULES_LIST}) - file(APPEND ${_filenameTmp} " ${_name}_${_currentModule}();\n") - endforeach() - file(APPEND ${_filenameTmp} "}\n\n") - file(APPEND ${_filenameTmp} "#ifndef EXCLUDE_LOAD_ALL_FUNCTION\nvoid CMakeLoadAllPythonModules(void)\n{\n ${_name}_LoadAllPythonModules();\n}\n#endif\n\n#endif\n") - -# with configure_file() cmake complains that you may not use a file created using file(WRITE) as input file for configure_file() - execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${_filenameTmp}" "${_filename}" OUTPUT_QUIET ERROR_QUIET) - -endfunction() diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/FindRESVG.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/FindRESVG.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/FindRESVG.cmake 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/FindRESVG.cmake 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,115 @@ +# vim: ts=2 sw=2 +#[=======================================================================[.rst: +FindRESVG +--------- +Try to find the shared-library 
build of resvg, the Rust SVG library + +IMPORTED targets +^^^^^^^^^^^^^^^^ + +This module defines :prop_tgt:`IMPORTED` target ``RESVG::resvg`` when +the library and headers are found. + +Result Variables +^^^^^^^^^^^^^^^^ + +This module defines the following variables: + +:: + + RESVG_FOUND - Library and header files found + RESVG_INCLUDE_DIRS - Include directory path + RESVG_LIBRARIES - Link path to the library + RESVG_DEFINITIONS - Compiler switches (currently unused) + +Backwards compatibility +^^^^^^^^^^^^^^^^^^^^^^^ + +For compatibility with previous versions of this module, uppercase names +for FFmpeg and for all components are also recognized, and all-uppercase +versions of the cache variables are also created. + +Control variables +^^^^^^^^^^^^^^^^^ + +The following variables can be used to provide path hints to the module: + +RESVGDIR - Set in the calling CMakeLists.txt or on the command line +ENV{RESVGDIR} - An environment variable in the cmake process context + +Copyright (c) 2020, FeRD (Frank Dana) +#]=======================================================================] +include(FindPackageHandleStandardArgs) + +# CMake 3.4+ only: Convert relative paths to absolute +if(DEFINED RESVGDIR AND CMAKE_VERSION VERSION_GREATER 3.4) + get_filename_component(RESVGDIR "${RESVGDIR}" ABSOLUTE + BASE_DIR ${CMAKE_CURRENT_BINARY_DIR}) +endif() + +find_path(RESVG_INCLUDE_DIRS + ResvgQt.h + PATHS + ${RESVGDIR} + ${RESVGDIR}/include + $ENV{RESVGDIR} + $ENV{RESVGDIR}/include + /usr/include + /usr/local/include + PATH_SUFFIXES + resvg + capi/include + resvg/capi/include +) + +find_library(RESVG_LIBRARIES + NAMES resvg + PATHS + ${RESVGDIR} + ${RESVGDIR}/lib + $ENV{RESVGDIR} + $ENV{RESVGDIR}/lib + /usr/lib + /usr/local/lib + PATH_SUFFIXES + resvg + target/release + resvg/target/release +) + +if (RESVG_INCLUDE_DIRS AND RESVG_LIBRARIES) + set(RESVG_FOUND TRUE) +endif() +set(RESVG_LIBRARIES ${RESVG_LIBRARIES} CACHE STRING "The Resvg library link path") +set(RESVG_INCLUDE_DIRS ${RESVG_INCLUDE_DIRS} CACHE STRING "The Resvg include directories") +set(RESVG_DEFINITIONS "" CACHE STRING "The Resvg CFLAGS") + +mark_as_advanced(RESVG_LIBRARIES RESVG_INCLUDE_DIRS RESVG_DEFINITIONS) + +# Give a nice error message if some of the required vars are missing. 
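A usage sketch only (svg_demo and the USE_RESVG definition are hypothetical): because the module reports a non-fatal "using Qt SVG parsing instead" message when resvg is absent, a consumer can treat the RESVG::resvg target created below as optional:

    find_package(RESVG)   # not REQUIRED; absence falls back to Qt SVG parsing
    if(TARGET RESVG::resvg)
      target_link_libraries(svg_demo PRIVATE RESVG::resvg)
      target_compile_definitions(svg_demo PRIVATE USE_RESVG)
    endif()
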
+find_package_handle_standard_args(RESVG + "Could NOT find RESVG, using Qt SVG parsing instead" + RESVG_LIBRARIES RESVG_INCLUDE_DIRS ) + +# Export target +if(RESVG_FOUND AND NOT TARGET RESVG::resvg) + message(STATUS "Creating IMPORTED target RESVG::resvg") + if (WIN32) + # Windows mis-links SHARED library targets + add_library(RESVG::resvg UNKNOWN IMPORTED) + else() + # Linux needs SHARED to link because libresvg has no SONAME + add_library(RESVG::resvg SHARED IMPORTED) + set_property(TARGET RESVG::resvg APPEND PROPERTY + IMPORTED_NO_SONAME TRUE) + endif() + + set_property(TARGET RESVG::resvg APPEND PROPERTY + INTERFACE_INCLUDE_DIRECTORIES "${RESVG_INCLUDE_DIRS}") + + set_property(TARGET RESVG::resvg APPEND PROPERTY + INTERFACE_COMPILE_DEFINITIONS "${RESVG_DEFINITIONS}") + + set_property(TARGET RESVG::resvg APPEND PROPERTY + IMPORTED_LOCATION "${RESVG_LIBRARIES}") +endif() diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/FindUnitTest++.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/FindUnitTest++.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/FindUnitTest++.cmake 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/FindUnitTest++.cmake 2020-03-03 08:00:06.000000000 +0000 @@ -1,43 +1,59 @@ -# Locate UNITTEST +# Locate UnitTest++ # This module defines -# UNITTEST++_LIBRARY -# UNITTEST++_FOUND, if false, do not try to link to gdal -# UNITTEST++_INCLUDE_DIR, where to find the headers - -FIND_PATH(UNITTEST++_INCLUDE_DIR UnitTest++.h - ${UNITTEST_DIR}/include/unittest++ - $ENV{UNITTEST_DIR}/include/unittest++ - $ENV{UNITTEST_DIR}/src +# UnitTest++_FOUND, if successful +# UnitTest++_LIBRARIES, the library path +# UnitTest++_INCLUDE_DIRS, where to find the headers + +find_package(PkgConfig QUIET) +if(PKG_CONFIG_FOUND) + pkg_check_modules(PC_UnitTest QUIET UnitTest++) + set(UnitTest++_VERSION ${PC_UnitTest_VERSION}) +endif() + + +FIND_PATH(UnitTest++_INCLUDE_DIRS UnitTest++.h + DOC + "Location of UnitTest++ header files" + PATH_SUFFIXES + unittest++ + UnitTest++ # Fedora, Arch + unittest-cpp # openSUSE + HINTS + ${PC_UnitTest++_INCLUDEDIR} + ${PC_UnitTest++_INCLUDE_DIRS} + PATHS + ${UnitTest++_ROOT} + ${UNITTEST_DIR} + $ENV{UNITTEST_DIR}/src $ENV{UNITTEST_DIR} ~/Library/Frameworks /Library/Frameworks - /usr/local/include - /usr/include - /usr/include/unittest++ - /usr/include/UnitTest++ # Fedora - /usr/include/unittest-cpp # openSUSE - /usr/local/include/UnitTest++/ # Arch - /sw/include # Fink - /opt/local/include # DarwinPorts - /opt/local/include/UnitTest++ - /opt/csw/include # Blastwave - /opt/include + /usr/local + /sw # Fink + /opt + /opt/local # DarwinPorts + /opt/csw # Blastwave [HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\Session\ Manager\\Environment]/include - /usr/freeware/include + /usr/freeware ) -FIND_LIBRARY(UNITTEST++_LIBRARY - NAMES unittest++ UnitTest++ - PATHS - ${UNITTEST_DIR}/lib - $ENV{UNITTEST_DIR}/lib - $ENV{UNITTEST_DIR}/build +FIND_LIBRARY(UnitTest++_LIBRARIES + NAMES unittest++ UnitTest++ + DOC + "Location of UnitTest++ shared library" + HINTS + ${PC_UnitTest++_LIBDIR} + ${PC_UnitTest++_LIBRARY_DIRS} + PATHS + ${UnitTest++_ROOT} + ${UnitTest++_ROOT}/lib + ${UNITTEST_DIR} $ENV{UNITTEST_DIR} + $ENV{UNITTEST_DIR}/lib + $ENV{UNITTEST_DIR}/build ~/Library/Frameworks /Library/Frameworks /usr/local/lib - /usr/lib - /usr/lib64/ # Fedora /sw/lib /opt/local/lib /opt/csw/lib @@ -46,13 +62,24 @@ /usr/freeware/lib64 ) -SET(UNITTEST++_FOUND "NO") -IF(UNITTEST++_LIBRARY AND UNITTEST++_INCLUDE_DIR) - SET(UNITTEST++_FOUND "YES") -ENDIF(UNITTEST++_LIBRARY 
AND UNITTEST++_INCLUDE_DIR) +if(UnitTest++_LIBRARIES AND UnitTest++_INCLUDE_DIRS) + set(UnitTest++_FOUND TRUE) +endif() include(FindPackageHandleStandardArgs) -# handle the QUIETLY and REQUIRED arguments and set UNITTEST++_FOUND to TRUE -# if all listed variables are TRUE -find_package_handle_standard_args(UNITTEST++ DEFAULT_MSG - UNITTEST++_LIBRARY UNITTEST++_INCLUDE_DIR) +find_package_handle_standard_args(UnitTest++ + REQUIRED_VARS + UnitTest++_LIBRARIES + UnitTest++_INCLUDE_DIRS + VERSION_VAR + UnitTest++_VERSION +) + +# Excessive backwards-compatibility paranoia +set(UnitTest++_LIBRARY "${UnitTest++_LIBRARIES}" PARENT_SCOPE) +set(UnitTest++_INCLUDE_DIR "${UnitTest++_INCLUDE_DIRS}" PARENT_SCOPE) +# Even more excessive backwards-compatibility paranoia +set(UNITTEST++_FOUND "${UnitTest++_FOUND}" PARENT_SCOPE) +set(UNITTEST++_LIBRARY "${UnitTest++_LIBRARIES}" PARENT_SCOPE) +set(UNITTEST++_INCLUDE_DIR "${UnitTest++_INCLUDE_DIRS}" PARENT_SCOPE) + diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/FindZeroMQ.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/FindZeroMQ.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/FindZeroMQ.cmake 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/FindZeroMQ.cmake 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,34 @@ +set(PKG_CONFIG_USE_CMAKE_PREFIX_PATH ON) +find_package(PkgConfig) +pkg_check_modules(PC_LIBZMQ QUIET libzmq) + +set(ZeroMQ_VERSION ${PC_LIBZMQ_VERSION}) + +find_path(ZeroMQ_INCLUDE_DIR zmq.h + PATHS ${ZeroMQ_DIR}/include + ${PC_LIBZMQ_INCLUDE_DIRS}) + +find_library(ZeroMQ_LIBRARY + NAMES zmq + PATHS ${ZeroMQ_DIR}/lib + ${PC_LIBZMQ_LIBDIR} + ${PC_LIBZMQ_LIBRARY_DIRS}) + +if(ZeroMQ_LIBRARY) + set(ZeroMQ_FOUND ON) +endif() + +set ( ZeroMQ_LIBRARIES ${ZeroMQ_LIBRARY} ) +set ( ZeroMQ_INCLUDE_DIRS ${ZeroMQ_INCLUDE_DIR} ) + +if(NOT TARGET libzmq) + add_library(libzmq UNKNOWN IMPORTED) + set_target_properties(libzmq PROPERTIES + IMPORTED_LOCATION ${ZeroMQ_LIBRARIES} + INTERFACE_INCLUDE_DIRECTORIES ${ZeroMQ_INCLUDE_DIRS}) +endif() + +include ( FindPackageHandleStandardArgs ) +# handle the QUIETLY and REQUIRED arguments and set ZMQ_FOUND to TRUE +# if all listed variables are TRUE +find_package_handle_standard_args ( ZeroMQ DEFAULT_MSG ZeroMQ_LIBRARIES ZeroMQ_INCLUDE_DIRS ) diff -Nru libopenshot-0.2.2+dfsg1/cmake/Modules/UseDoxygen.cmake libopenshot-0.2.5+dfsg1/cmake/Modules/UseDoxygen.cmake --- libopenshot-0.2.2+dfsg1/cmake/Modules/UseDoxygen.cmake 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Modules/UseDoxygen.cmake 2020-03-03 08:00:06.000000000 +0000 @@ -1,4 +1,30 @@ -# - Run Doxygen +# Redistribution and use is allowed according to the terms of the New +# BSD license: +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 3. The name of the author may not be used to endorse or promote products +# derived from this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# - Run Doxygen # # Adds a doxygen target that runs doxygen to generate the html # and optionally the LaTeX API documentation. @@ -48,7 +74,6 @@ # # Redistribution and use is allowed according to the terms of the New # BSD license. -# For details see the accompanying COPYING-CMAKE-SCRIPTS file. # macro(usedoxygen_set_default name value type docstring) @@ -80,12 +105,18 @@ STRING "Additional source files/directories separated by space") set(DOXYFILE_SOURE_DIRS "\"${DOXYFILE_SOURCE_DIR}\" ${DOXYFILE_EXTRA_SOURCES}") - usedoxygen_set_default(DOXYFILE_LATEX YES BOOL "Generate LaTeX API documentation" OFF) + usedoxygen_set_default(DOXYFILE_LATEX OFF BOOL "Generate LaTeX API documentation") usedoxygen_set_default(DOXYFILE_LATEX_DIR "latex" STRING "LaTex output directory") mark_as_advanced(DOXYFILE_OUTPUT_DIR DOXYFILE_HTML_DIR DOXYFILE_LATEX_DIR DOXYFILE_SOURCE_DIR DOXYFILE_EXTRA_SOURCE_DIRS DOXYFILE_IN) + ## Dot + usedoxygen_set_default(DOXYFILE_USE_DOT ON BOOL "Use dot (part of graphviz) to generate graphs") + set(DOXYFILE_DOT "NO") + if(DOXYFILE_USE_DOT AND DOXYGEN_DOT_EXECUTABLE) + set(DOXYFILE_DOT "YES") + endif() set_property(DIRECTORY APPEND PROPERTY @@ -100,13 +131,12 @@ ## LaTeX set(DOXYFILE_PDFLATEX "NO") - set(DOXYFILE_DOT "NO") set_property(DIRECTORY APPEND PROPERTY ADDITIONAL_MAKE_CLEAN_FILES "${DOXYFILE_OUTPUT_DIR}/${DOXYFILE_LATEX_DIR}") - if(DOXYFILE_LATEX STREQUAL "ON") + if(DOXYFILE_LATEX) set(DOXYFILE_GENERATE_LATEX "YES") find_package(LATEX) find_program(DOXYFILE_MAKE make) @@ -115,9 +145,6 @@ if(PDFLATEX_COMPILER) set(DOXYFILE_PDFLATEX "YES") endif() - if(DOXYGEN_DOT_EXECUTABLE) - set(DOXYFILE_DOT "YES") - endif() add_custom_command(TARGET doxygen POST_BUILD @@ -134,7 +161,9 @@ configure_file("${DOXYFILE_IN}" "${DOXYFILE}" @ONLY) - get_target_property(DOC_TARGET doc TYPE) + if(TARGET doc) + get_target_property(DOC_TARGET doc TYPE) + endif() if(NOT DOC_TARGET) add_custom_target(doc) endif() diff -Nru libopenshot-0.2.2+dfsg1/cmake/Windows/build-imagemagick.sh libopenshot-0.2.5+dfsg1/cmake/Windows/build-imagemagick.sh --- libopenshot-0.2.2+dfsg1/cmake/Windows/build-imagemagick.sh 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Windows/build-imagemagick.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,274 +0,0 @@ -#!/bin/bash -# xml2 build ok but failed test -# libfpx build error - -function ised() { - IN=$1 - shift - tmp=$RANDOM.$$ - <$IN sed "$@" >$tmp && cat $tmp > $IN - rm $tmp -} - -function ask() { - read -p "${1:-Are you sure?]} [Y/n] " response - case $response in - y|Y|"") - true;; - *) - false;; - esac -} - -function download() { - while IFS=\; read url md5 <&3; do - fileName=${url##*/} - - echo "Downloading ${fileName}..." - while true; do - if [[ ! 
-e $fileName ]]; then - wget ${url} -O ${fileName} - else - echo "File exists!" - fi - - localMd5=$(md5sum ${fileName} | cut -d\ -f1) - - if [[ ${localMd5} != ${md5} ]]; then - ask "Checksum failed. Do you want to download this file again? [Y/n] " - if [[ $? -ne 0 ]]; then - exit 1 - fi - rm ${fileName} - else - break - fi - done - done 3< urls.txt -} - -function extract() { - file=$1 - if [[ ! -e ${file} ]]; then - return - fi - - case $file in - *.tar.gz) - tar xzf $file - ;; - *.tar.xz|*.tar.lzma) - tar xJf $file - ;; - *.tar.bz2) - tar xjf $file - ;; - *) - "Don't know how to extract $file" - esac -} - -function isLibsInstalled() { - libs="$@" - notfound=false - for l in "${libs}"; do - ld -L/usr/local/lib -l"${l}" 2>/dev/null - if [[ $? -ne 0 ]]; then - notfound=true - fi - done - - ! ${notfound} -} - -function isDirExists() { - dir="$@" - found=false - for d in ${dir}; do - if [[ -d "${d}" ]]; then - found=true - break - fi - done - - ${found} -} - -function extractIfNeeded() { - file=$1 - isDirExists ${file%%-*}-* - if [[ $? -ne 0 ]]; then - echo "Extracting $file" - extract $file - fi -} - -function buildbzip2() { - if isLibsInstalled "bz2"; then - if ask "Found bzip2 installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded bzip2-*.tar.lzma - - cd bzip2-*/ - tar xzf bzip2-1.0.6.tar.gz - tar xzf cygming-autotools-buildfiles.tar.gz - cd bzip2-*/ - autoconf - mkdir ../build - cd ../build - ../bzip2-*/configure - make - make install - cd ../.. -} - -function buildzlib() { - if isLibsInstalled "z"; then - if ask "Found zlib installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded zlib-*.tar.xz - - cd zlib-*/ - INCLUDE_PATH=/usr/local/include LIBRARY_PATH=/usr/local/lib BINARY_PATH=/usr/local/bin make install -f win32/Makefile.gcc SHARED_MODE=1 - cd .. -} - -function buildlibxml2() { - if isLibsInstalled "xml2"; then - if ask "Found libxml2 installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extractIfNeeded libxml2-*.tar.gz - cd libxml2-*/win32/ - ised configure.js 's/dirSep = "\\\\";/dirSep = "\/";/' - cscript.exe configure.js compiler=mingw prefix=/usr/local - # ised ../dict.c '/typedef.*uint32_t;$/d' - ised Makefile.mingw 's/cmd.exe \/C "\?if not exist \(.*\) mkdir \1"\?/mkdir -p \1/' - ised Makefile.mingw 's/cmd.exe \/C "copy\(.*\)"/cp\1/' - ised Makefile.mingw '/cp/{y/\\/\//;}' - ised Makefile.mingw '/PREFIX/{y/\\/\//;}' - make -f Makefile.mingw - make -f Makefile.mingw install - cd ../../ -} - -function buildlibpng() { - if isLibsInstalled "png"; then - if ask "Found libpng installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded libpng-*.tar.xz - - cd libpng-*/ - make -f scripts/makefile.msys - make install -f scripts/makefile.msys - cd .. -} - -function buildjpegsrc() { - if isLibsInstalled "jpeg"; then - if ask "Found jpegsrc installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extract jpegsrc*.tar.gz - - cd jpeg-*/ - ./configure - make - make install - cd .. -} - -function buildfreetype() { - if isLibsInstalled "freetype"; then - if ask "Found freetype installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract freetype*.tar.bz2 - - INCLUDE_PATH=/usr/local/include - LIBRARY_PATH=/usr/local/lib - BINARY_PATH=/usr/local/bin - cd freetype-*/ - ./configure - make - make install - cd .. -} - -function buildlibwmf() { - if isLibsInstalled "wmf"; then - if ask "Found libwmf installed. 
Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract libwmf*.tar.gz - - cd libwmf-*/ - ./configure CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd .. -} - -function buildlibwebp() { - if isLibsInstalled "webp"; then - if ask "Found libwebp installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract libwebp*.tar.gz - - cd libwebp-*/ - ./configure CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd .. -} - -function buildDelegate() { - delegates="bzip2 zlib libxml2 libpng jpegsrc freetype libwmf libwebp" - for d in ${delegates}; do - echo "**********************************************************" - echo "Building $d" - build${d} - done -} - -function build() { - extractIfNeeded ImageMagick-*.tar.xz - - local oldPwd=$(pwd -L) - cd ImageMagick-*/ - # patch configure - #sed -i 's/${GDI32_LIBS}x" !=/${GDI32_LIBS} ==/' configure - ised configure 's/${GDI32_LIBS}x" !=/${GDI32_LIBS} ==/' - ./configure --enable-shared --disable-static --enable-delegate-build --without-modules CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd ${oldPwd} -} - -download -buildDelegate -build diff -Nru libopenshot-0.2.2+dfsg1/cmake/Windows/README libopenshot-0.2.5+dfsg1/cmake/Windows/README --- libopenshot-0.2.2+dfsg1/cmake/Windows/README 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Windows/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,85 +0,0 @@ -#################################################################### - Install Dependencies for Windows -#################################################################### - -Most Windows dependencies needed for libopenshot-audio, libopenshot, and openshot-qt -can be installed easily with MSYS2 and the pacman package manager. 
Follow these -directions to setup a Windows build environment for OpenShot: - -#################################################################### - -1) Install MSYS2 (http://www.msys2.org/) - -2) Run MSYS2 command prompt (for example: C:\msys64\msys2_shell.cmd) - -3) Append PATH (so MSYS2 can find executables and libraries): - $ PATH=$PATH:/c/msys64/mingw64/bin:/c/msys64/mingw64/lib (64-bit PATH) - or - $ PATH=$PATH:/c/msys32/mingw32/bin:/c/msys32/mingw32/lib (32-bit PATH) - -4) Update and upgrade all packages - $ pacman -Syu - -5a) Install the following packages: -*** for 64-BIT support *** - - $ pacman -S --needed base-devel mingw-w64-x86_64-toolchain - $ pacman -S mingw64/mingw-w64-x86_64-ffmpeg - $ pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5 - $ pacman -S mingw64/mingw-w64-x86_64-swig - $ pacman -S mingw64/mingw-w64-x86_64-cmake - $ pacman -S mingw64/mingw-w64-x86_64-doxygen - $ pacman -S mingw64/mingw-w64-x86_64-python3-pip - $ pacman -S mingw32/mingw-w64-i686-zeromq - $ pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq - $ pacman -S mingw64/mingw-w64-x86_64-python3-cx_Freeze - $ pacman -S git - - Install ImageMagick if needed (OPTIONAL and NOT NEEDED) - $ pacman -S mingw64/mingw-w64-x86_64-imagemagick - -5b) Install the following packages: -*** for 32-BIT support *** - - $ pacman -S --needed base-devel mingw32/mingw-w64-i686-toolchain - $ pacman -S mingw32/mingw-w64-i686-ffmpeg - $ pacman -S mingw32/mingw-w64-i686-python3-pyqt5 - $ pacman -S mingw32/mingw-w64-i686-swig - $ pacman -S mingw32/mingw-w64-i686-cmake - $ pacman -S mingw32/mingw-w64-i686-doxygen - $ pacman -S mingw32/mingw-w64-i686-python3-pip - $ pacman -S mingw32/mingw-w64-i686-zeromq - $ pacman -S mingw32/mingw-w64-i686-python3-pyzmq - $ pacman -S mingw32/mingw-w64-i686-python3-cx_Freeze - $ pacman -S git - - Install ImageMagick if needed (OPTIONAL and NOT NEEDED) - $ pacman -S mingw32/mingw-w32-x86_32-imagemagick - -6) Install Python PIP Dependencies - $ pip3 install httplib2 - $ pip3 install slacker - $ pip3 install tinys3 - $ pip3 install github3.py - $ pip3 install requests - -7) Download Unittest++ (https://github.com/unittest-cpp/unittest-cpp) into /c/home/USER/unittest-cpp-master/ - Configure Unittest++: - $ cmake -G "MSYS Makefiles" ../ -DCMAKE_MAKE_PROGRAM=mingw32-make -DCMAKE_INSTALL_PREFIX:PATH=/usr - Build Unittest++ - $ mingw32-make install - -8) ZMQ++ Header (This might not be needed anymore) - NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder - - -#################################################################### - OPTIONAL: Installing ImageMagick on Windows -#################################################################### - -If you would rather install ImageMagick from source code yourself, follow these steps: - -Step 1) Copy [build-imagemagick.sh and urls.txt] into your local MSYS2 environment -Step 2) Run MSYS2 Shell -Step 3) Execute this command - $ ./build-imagemagick.sh \ No newline at end of file diff -Nru libopenshot-0.2.2+dfsg1/cmake/Windows/urls.txt libopenshot-0.2.5+dfsg1/cmake/Windows/urls.txt --- libopenshot-0.2.2+dfsg1/cmake/Windows/urls.txt 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/cmake/Windows/urls.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -ftp://ftp.imagemagick.org/pub/ImageMagick/releases/ImageMagick-6.8.8-10.tar.xz;ab9b397c1d4798a9f6ae6cc94aa292fe -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libpng-1.6.20.tar.xz;3968acb7c66ef81a9dab867f35d0eb4b 
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libwebp-0.4.4.tar.gz;b737062cf688e502b940b460ddc3015f -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libwmf-0.2.8.4.tar.gz;d1177739bf1ceb07f57421f0cee191e0 -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libxml2-2.9.3.tar.gz;daece17e045f1c107610e137ab50c179 -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/zlib-1.2.8.tar.xz;28f1205d8dd2001f26fec1e8c2cebe37 -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/freetype-2.6.2.tar.bz2;86109d0c998787d81ac582bad9adf82e -http://ncu.dl.sourceforge.net/project/mingw/MinGW/Extension/bzip2/bzip2-1.0.6-4/bzip2-1.0.6-4-mingw32-src.tar.lzma;2a25de4331d1e6e1458d8632dff55fad -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libfpx-1.3.1-4.tar.xz;65e2cf8dcf230ad0b90aead35553bbda -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/jpegsrc.v9a.tar.gz;3353992aecaee1805ef4109aadd433e7 diff -Nru libopenshot-0.2.2+dfsg1/CMakeLists.txt libopenshot-0.2.5+dfsg1/CMakeLists.txt --- libopenshot-0.2.2+dfsg1/CMakeLists.txt 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/CMakeLists.txt 2020-03-03 08:00:06.000000000 +0000 @@ -4,10 +4,10 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of -# OpenShot Library (libopenshot), an open-source project dedicated to -# delivering high quality video editing and animation solutions to the +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the # world. For more information visit . # # OpenShot Library (libopenshot) is free software: you can redistribute it @@ -24,67 +24,160 @@ # along with OpenShot Library. If not, see . ################################################################################ -cmake_minimum_required(VERSION 2.8.11) +cmake_minimum_required(VERSION 3.2...3.14 FATAL_ERROR) -MESSAGE("--------------------------------------------------------------") -MESSAGE("Welcome to the OpenShot Build System! CMake will now check for all required build") -MESSAGE("dependencies and notify you of any missing files or other issues. If you have any") -MESSAGE("questions or issues, please visit .") +message("\ +----------------------------------------------------------------- + Welcome to the OpenShot Build System! + +CMake will now check libopenshot's build dependencies and inform +you of any missing files or other issues. + +For more information, please visit . 
+-----------------------------------------------------------------") ################ ADD CMAKE MODULES ################## set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/Modules") -################ GET VERSION INFORMATION FROM VERSION.H ################## -MESSAGE("--------------------------------------------------------------") -MESSAGE("Determining Version Number (from Version.h file)") - -#### Get the lines related to libopenshot version from the Version.h header -file(STRINGS ${CMAKE_CURRENT_SOURCE_DIR}/include/Version.h OPENSHOT_VERSION_LINES - REGEX "#define[ ]+OPENSHOT_VERSION_.*[0-9]+;.*") - -#### Set each line into it's own variable -list (GET OPENSHOT_VERSION_LINES 0 LINE_MAJOR) -list (GET OPENSHOT_VERSION_LINES 1 LINE_MINOR) -list (GET OPENSHOT_VERSION_LINES 2 LINE_BUILD) -list (GET OPENSHOT_VERSION_LINES 3 LINE_SO) - -#### Get the version number out of each line -STRING(REGEX REPLACE "#define[ ]+OPENSHOT_VERSION_MAJOR[ ]+([0-9]+);(.*)" "\\1" MAJOR_VERSION "${LINE_MAJOR}") -STRING(REGEX REPLACE "#define[ ]+OPENSHOT_VERSION_MINOR[ ]+([0-9]+);(.*)" "\\1" MINOR_VERSION "${LINE_MINOR}") -STRING(REGEX REPLACE "#define[ ]+OPENSHOT_VERSION_BUILD[ ]+([0-9]+);(.*)" "\\1" BUILD_VERSION "${LINE_BUILD}") -STRING(REGEX REPLACE "#define[ ]+OPENSHOT_VERSION_SO[ ]+([0-9]+);(.*)" "\\1" SO_VERSION "${LINE_SO}") -set(PROJECT_VERSION "${MAJOR_VERSION}.${MINOR_VERSION}.${BUILD_VERSION}") - -MESSAGE("--> MAJOR Version: ${MAJOR_VERSION}") -MESSAGE("--> MINOR Version: ${MINOR_VERSION}") -MESSAGE("--> BUILD Version: ${BUILD_VERSION}") -MESSAGE("--> SO/API/ABI Version: ${SO_VERSION}") -MESSAGE("--> VERSION: ${PROJECT_VERSION}") -MESSAGE("") +################ PROJECT VERSION #################### +set(PROJECT_VERSION_FULL "0.2.5") +set(PROJECT_SO_VERSION 19) + +# Remove the dash and anything following, to get the #.#.# version for project() +STRING(REGEX REPLACE "\-.*$" "" VERSION_NUM "${PROJECT_VERSION_FULL}") ################### SETUP PROJECT ################### -PROJECT(openshot) -MESSAGE("--------------------------------------------------------------") -MESSAGE("Generating build files for ${PROJECT_NAME} (${PROJECT_VERSION})") +# This will define the following variables +# PROJECT_NAME +# PROJECT_VERSION, libopenshot_VERSION +# PROJECT_VERSION_MAJOR, libopenshot_VERSION_MAJOR +# PROJECT_VERSION_MINOR, libopenshot_VERSION_MINOR +# PROJECT_VERSION_PATCH, libopenshot_VERSION_PATCH +PROJECT(libopenshot LANGUAGES C CXX VERSION ${VERSION_NUM}) + +message(" +Generating build files for OpenShot with CMake ${CMAKE_VERSION} + Building ${PROJECT_NAME} (version ${PROJECT_VERSION}) + SO/API/ABI Version: ${PROJECT_SO_VERSION} +") + +# Define install paths according to system conventions +# XXX: This must be AFTER THE PROJECT() COMMAND w/ languages enabled, +# in order to properly configure CMAKE_INSTALL_LIBDIR path +include(GNUInstallDirs) + +# Collect and display summary of options/dependencies +include(FeatureSummary) + +################ OPTIONS ################## +# Optional build settings for libopenshot +option(USE_SYSTEM_JSONCPP "Use system installed JsonCpp, if found" ON) +option(DISABLE_BUNDLED_JSONCPP "Don't fall back to bundled JsonCpp" OFF) +option(ENABLE_IWYU "Enable 'Include What You Use' scanner (CMake 3.3+)" OFF) +option(ENABLE_TESTS "Build unit tests (requires UnitTest++)" ON) +option(ENABLE_DOCS "Build API documentation (requires Doxygen)" ON) + +# Legacy commandline override +if (DISABLE_TESTS) + if(ENABLE_COVERAGE) + message(WARNING "ENABLE_COVERAGE requires tests, overriding DISABLE_TESTS") + 
set(ENABLE_TESTS ON) + else() + set(ENABLE_TESTS OFF) + endif() +endif() + +if(DEFINED ENABLE_TESTS) + set(ENABLE_TESTS ${ENABLE_TESTS} CACHE BOOL "Build unit tests (requires UnitTest++)" FORCE) +endif() + +########## Configure Version.h header ############## +configure_file(include/OpenShotVersion.h.in include/OpenShotVersion.h @ONLY) +# We'll want that installed later +install(FILES ${CMAKE_CURRENT_BINARY_DIR}/include/OpenShotVersion.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/libopenshot) + +#### Work around a GCC < 9 bug with handling of _Pragma() in macros +#### See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=55578 +if ((${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU") AND + (${CMAKE_CXX_COMPILER_VERSION} VERSION_LESS "9.0.0")) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -no-integrated-cpp") +endif() #### Enable C++11 (for std::shared_ptr support) -set(CMAKE_CXX_FLAGS "-std=c++11") +set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) IF (WIN32) SET_PROPERTY(GLOBAL PROPERTY WIN32 "WIN32") ENDIF(WIN32) -############## FIND ALL QT RELATED HEADERS ############## -set(QT_HEADER_DIR ${CMAKE_CURRENT_SOURCE_DIR}/include/Qt) -FILE(GLOB QT_HEADER_FILES "${QT_HEADER_DIR}/*.h") +include_directories( + ${CMAKE_CURRENT_SOURCE_DIR}/include + ${CMAKE_CURRENT_BINARY_DIR}/include) + +############## Code Coverage ######################### +if (DISABLE_TESTS AND ENABLE_COVERAGE) + message(WARNING "ENABLE_COVERAGE requires tests, overriding DISABLE_TESTS") + set(DISABLE_TESTS OFF CACHE BOOL "Don't build unit tests" FORCE) +endif() + +if (ENABLE_COVERAGE) + if (NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE "Debug") + message(STATUS "Coverage enabled, setting build type to Debug") + endif() + include(CodeCoverage) + append_coverage_compiler_flags() +endif() +add_feature_info("Coverage" ENABLE_COVERAGE "analyze test coverage and generate report") -############## PROCESS SUB-DIRECTORIES ############## +############## PROCESS src/ DIRECTORIES ############## add_subdirectory(src) -add_subdirectory(tests) ################### DOCUMENTATION ################### # Find Doxygen (used for documentation) -include(cmake/Modules/UseDoxygen.cmake) - -file(GLOB_RECURSE doc_files ${CMAKE_CURRENT_BINARY_DIR}/doc/html/*.*) -INSTALL(FILES ${doc_files} DESTINATION share/doc/libopenshot) +set(DOCS_ENABLED FALSE) # Only set true if Doxygen is found and configured +if (ENABLE_DOCS) + include(cmake/Modules/UseDoxygen.cmake) + + # Doxygen was found + if (TARGET doc) + message(STATUS "Doxygen found, documentation target enabled") + set(DOCS_ENABLED TRUE) + + # Install docs, if the user builds them with `make doc` + install(CODE "MESSAGE(\"Checking for documentation files to install...\")") + install(CODE "MESSAGE(\"(Compile with 'make doc' command, requires Doxygen)\")") + + install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/doc/html/ + DESTINATION ${CMAKE_INSTALL_DOCDIR}/API + MESSAGE_NEVER # Don't spew about file copies + OPTIONAL ) # No error if the docs aren't found + endif() +endif() +add_feature_info("Documentation" DOCS_ENABLED "Build API documentation with 'make doc'") + +############# PROCESS tests/ DIRECTORY ############## +if(ENABLE_TESTS) + set(TESTS_ENABLED TRUE) # May be overridden by tests/CMakeLists.txt + add_subdirectory(tests) +endif() +add_feature_info("Unit tests" TESTS_ENABLED "Compile unit tests for library functions") + +############## COVERAGE REPORTING ################# +if (ENABLE_COVERAGE) + setup_target_for_coverage_lcov( + NAME coverage + LCOV_ARGS "--no-external" + EXECUTABLE 
openshot-test + DEPENDENCIES openshot-test) + message("Generate coverage report with 'make coverage'") +endif() + +########### PRINT FEATURE SUMMARY ############## +feature_summary(WHAT ALL + INCLUDE_QUIET_PACKAGES + FATAL_ON_MISSING_REQUIRED_PACKAGES + DESCRIPTION "Displaying feature summary\n\nBuild configuration:") diff -Nru libopenshot-0.2.2+dfsg1/codecov.yml libopenshot-0.2.5+dfsg1/codecov.yml --- libopenshot-0.2.2+dfsg1/codecov.yml 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/codecov.yml 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,15 @@ +codecov: + branch: default +coverage: + status: + project: + default: + base: pr # Only post a status to pull requests + informational: true # Don't block PRs based on coverage stats (yet?) +ignore: + - "/src/examples" + - "/src/Qt/demo" + - "/thirdparty" + - "/doc" + - "/cmake" + - "/*.md" diff -Nru libopenshot-0.2.2+dfsg1/.cproject libopenshot-0.2.5+dfsg1/.cproject --- libopenshot-0.2.2+dfsg1/.cproject 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/.cproject 1970-01-01 00:00:00.000000000 +0000 @@ -1,405 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - cmake - -G "Unix Makefiles" ../ -D"CMAKE_BUILD_TYPE:STRING=Release" - true - false - true - - - cmake - -G "Unix Makefiles" ../ -D"MAGICKCORE_HDRI_ENABLE=0" -D"MAGICKCORE_QUANTUM_DEPTH=16" -D"OPENSHOT_IMAGEMAGICK_COMPATIBILITY=0" -D"ENABLE_BLACKMAGIC=1" -D"CMAKE_BUILD_TYPE:STRING=Debug" -D"DISABLE_TESTS=0" - - true - false - true - - - make - test - true - false - true - - - make - help - true - false - true - - - make - doc - true - false - true - - - cmake - -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.1.1/ -DPYTHON_INCLUDE_DIR=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/include/python3.3m/ -DPYTHON_LIBRARY=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/lib/libpython3.3.dylib -DPython_FRAMEWORKS=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/ ../ -D"CMAKE_BUILD_TYPE:STRING=Debug" - true - false - true - - - cmake - -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.1.1/ -DPYTHON_INCLUDE_DIR=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/include/python3.3m/ -DPYTHON_LIBRARY=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/lib/libpython3.3.dylib -DPython_FRAMEWORKS=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/ ../ -D"CMAKE_BUILD_TYPE:STRING=Release" - true - false - true - - - cmake - -G "MinGW Makefiles" ../ -D"CMAKE_BUILD_TYPE:STRING=Debug" - true - false - true - - - cmake - -G "MinGW Makefiles" ../ -D"CMAKE_BUILD_TYPE:STRING=Release" - true - false - true - - - - diff -Nru libopenshot-0.2.2+dfsg1/debian/changelog libopenshot-0.2.5+dfsg1/debian/changelog --- libopenshot-0.2.2+dfsg1/debian/changelog 
2020-03-25 17:42:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/changelog 2020-06-24 14:39:33.000000000 +0000 @@ -1,21 +1,36 @@ -libopenshot (0.2.2+dfsg1-1ubuntu3) focal; urgency=medium +libopenshot (0.2.5+dfsg1-2) unstable; urgency=medium - * No-change rebuild for libgcc-s1 package name change. + * [21078a8] Fix build-all + * [7fce00f] Add d/clean - -- Matthias Klose Wed, 25 Mar 2020 18:42:00 +0100 + -- Anton Gladky Wed, 24 Jun 2020 16:39:33 +0200 -libopenshot (0.2.2+dfsg1-1ubuntu2) focal; urgency=medium +libopenshot (0.2.5+dfsg1-1) unstable; urgency=medium - * Build using g++-8. - * Search Python 3.8. + * Upload into unstable. (Closes: #955156) - -- Matthias Klose Sat, 25 Jan 2020 04:33:41 +0000 + -- Anton Gladky Tue, 23 Jun 2020 22:46:18 +0200 -libopenshot (0.2.2+dfsg1-1build1) disco; urgency=medium +libopenshot (0.2.5+dfsg1-1~exp1) experimental; urgency=medium - * No-change rebuild to build for python3.7 as the default. + [ Ondřej Nový ] + * [683f32a] Use debhelper-compat instead of debian/compat - -- Matthias Klose Wed, 31 Oct 2018 12:35:32 +0000 + [ Anton Gladky ] + * [60960b8] Remove all patches + * [ecac4d4] Change so-version to 19 + * [5d17ffc] Use secure URI in Homepage field. + * [b5698b3] Set upstream metadata fields: + Bug-Database, Bug-Submit, Repository, Repository-Browse. + * [623db13] New upstream version 0.2.5+dfsg1. (Closes: #925754, #949216) + * [63703f2] Bump Standards-versions to 4.5.0 + * [0b59867] Set compat-level to 13 + * [f82dfb8] Set versioned dependency on libopenshot-audio-dev + * [d94b394] Update paths in docs + * [43a117c] Add myself to uploaders + * [b501294] Add Rules-Requires-Root: no + + -- Anton Gladky Fri, 19 Jun 2020 23:01:51 +0200 libopenshot (0.2.2+dfsg1-1) unstable; urgency=medium diff -Nru libopenshot-0.2.2+dfsg1/debian/clean libopenshot-0.2.5+dfsg1/debian/clean --- libopenshot-0.2.2+dfsg1/debian/clean 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/clean 2020-06-19 21:01:51.000000000 +0000 @@ -0,0 +1 @@ +debian/build/ diff -Nru libopenshot-0.2.2+dfsg1/debian/compat libopenshot-0.2.5+dfsg1/debian/compat --- libopenshot-0.2.2+dfsg1/debian/compat 2018-08-20 21:52:54.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/compat 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -11 diff -Nru libopenshot-0.2.2+dfsg1/debian/control libopenshot-0.2.5+dfsg1/debian/control --- libopenshot-0.2.2+dfsg1/debian/control 2020-01-25 04:33:41.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/control 2020-06-19 21:01:05.000000000 +0000 @@ -1,13 +1,13 @@ Source: libopenshot Maintainer: Debian Multimedia Maintainers -Uploaders: Dr. Tobias Quathamer +Uploaders: Dr. 
Tobias Quathamer , + Anton Gladky Section: libs +Rules-Requires-Root: no Priority: optional Build-Depends: cmake, - g++-8, - debhelper (>= 11), + debhelper-compat (= 13), dh-python, - dpkg-dev (>= 1.17.14), ffmpeg, libavcodec-dev, libavdevice-dev, @@ -16,7 +16,7 @@ libavutil-dev, libjsoncpp-dev, libmagick++-6.q16-dev, - libopenshot-audio-dev, + libopenshot-audio-dev (>= 0.2.0), libswscale-dev, libunittest++-dev, libzmq3-dev, @@ -26,12 +26,12 @@ qtmultimedia5-dev, swig Build-Depends-Indep: doxygen -Standards-Version: 4.2.1 +Standards-Version: 4.5.0 Vcs-Browser: https://salsa.debian.org/multimedia-team/libopenshot Vcs-Git: https://salsa.debian.org/multimedia-team/libopenshot.git -Homepage: http://www.openshot.org/ +Homepage: https://www.openshot.org/ -Package: libopenshot16 +Package: libopenshot19 Architecture: any Multi-Arch: same Depends: ${misc:Depends}, @@ -49,7 +49,7 @@ Multi-Arch: same Section: libdevel Depends: ${misc:Depends}, - libopenshot16 (= ${binary:Version}) + libopenshot19 (= ${binary:Version}) Suggests: libopenshot-doc Description: development files for the OpenShot video library OpenShot Library (libopenshot) is an open-source project dedicated to diff -Nru libopenshot-0.2.2+dfsg1/debian/copyright libopenshot-0.2.5+dfsg1/debian/copyright --- libopenshot-0.2.2+dfsg1/debian/copyright 2018-08-20 21:52:54.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/copyright 2020-06-19 21:01:51.000000000 +0000 @@ -2,13 +2,11 @@ Upstream-Name: libopenshot Source: https://github.com/OpenShot/libopenshot Comment: To be excluded from the source release: - - doc/*.pdf: binaries without source. - thirdparty/*: vendored dependencies. -Files-Excluded: doc/*.pdf - thirdparty/* +Files-Excluded: thirdparty/* Files: * -Copyright: 2008-2014 OpenShot Studios, LLC +Copyright: 2008-2019 OpenShot Studios, LLC License: LGPL-3+ Files: cmake/Modules/FindPythonLibs.cmake diff -Nru libopenshot-0.2.2+dfsg1/debian/.gitlab-ci.yml libopenshot-0.2.5+dfsg1/debian/.gitlab-ci.yml --- libopenshot-0.2.2+dfsg1/debian/.gitlab-ci.yml 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/.gitlab-ci.yml 2020-06-24 14:30:42.000000000 +0000 @@ -0,0 +1,3 @@ +include: + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/salsa-ci.yml + - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/pipeline-jobs.yml diff -Nru libopenshot-0.2.2+dfsg1/debian/libopenshot16.install libopenshot-0.2.5+dfsg1/debian/libopenshot16.install --- libopenshot-0.2.2+dfsg1/debian/libopenshot16.install 2018-08-21 14:23:24.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/libopenshot16.install 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -usr/lib/*/*.so.* diff -Nru libopenshot-0.2.2+dfsg1/debian/libopenshot19.install libopenshot-0.2.5+dfsg1/debian/libopenshot19.install --- libopenshot-0.2.2+dfsg1/debian/libopenshot19.install 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/libopenshot19.install 2018-09-22 20:41:04.000000000 +0000 @@ -0,0 +1 @@ +usr/lib/*/*.so.* diff -Nru libopenshot-0.2.2+dfsg1/debian/libopenshot-doc.docs libopenshot-0.2.5+dfsg1/debian/libopenshot-doc.docs --- libopenshot-0.2.2+dfsg1/debian/libopenshot-doc.docs 2018-08-20 21:52:54.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/libopenshot-doc.docs 2020-06-19 21:01:51.000000000 +0000 @@ -1 +1 @@ -*/doc/html +debian/build/doc/html/ diff -Nru libopenshot-0.2.2+dfsg1/debian/patches/0003-Fix-failing-tests-by-using-a-fault-tolerance.patch libopenshot-0.2.5+dfsg1/debian/patches/0003-Fix-failing-tests-by-using-a-fault-tolerance.patch 
--- libopenshot-0.2.2+dfsg1/debian/patches/0003-Fix-failing-tests-by-using-a-fault-tolerance.patch 2018-08-21 14:20:49.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/patches/0003-Fix-failing-tests-by-using-a-fault-tolerance.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,119 +0,0 @@ -From: "Dr. Tobias Quathamer" -Date: Sat, 18 Nov 2017 13:54:22 +0100 -Subject: Fix failing tests by using a fault tolerance - ---- - tests/FFmpegReader_Tests.cpp | 5 ++--- - tests/ImageWriter_Tests.cpp | 8 ++++---- - tests/Timeline_Tests.cpp | 18 +++++++++--------- - 3 files changed, 15 insertions(+), 16 deletions(-) - -diff --git a/tests/FFmpegReader_Tests.cpp b/tests/FFmpegReader_Tests.cpp -index 53563ca..7f7c960 100644 ---- a/tests/FFmpegReader_Tests.cpp -+++ b/tests/FFmpegReader_Tests.cpp -@@ -95,8 +95,8 @@ TEST(FFmpegReader_Check_Video_File) - int pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) - - // Check image properties on scanline 10, pixel 112 -- CHECK_EQUAL(21, (int)pixels[pixel_index]); -- CHECK_EQUAL(191, (int)pixels[pixel_index + 1]); -+ CHECK_CLOSE(21, (int)pixels[pixel_index], 1); -+ CHECK_CLOSE(191, (int)pixels[pixel_index + 1], 2); - CHECK_EQUAL(0, (int)pixels[pixel_index + 2]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); - -@@ -209,4 +209,3 @@ TEST(FFmpegReader_Multiple_Open_and_Close) - // Close reader - r.Close(); - } -- -diff --git a/tests/ImageWriter_Tests.cpp b/tests/ImageWriter_Tests.cpp -index 107ee39..d10c8bd 100644 ---- a/tests/ImageWriter_Tests.cpp -+++ b/tests/ImageWriter_Tests.cpp -@@ -73,9 +73,9 @@ TEST(ImageWriter_Test_Gif) - int pixel_index = 230 * 4; // pixel 230 (4 bytes per pixel) - - // Check image properties -- CHECK_EQUAL(20, (int)pixels[pixel_index]); -- CHECK_EQUAL(18, (int)pixels[pixel_index + 1]); -- CHECK_EQUAL(11, (int)pixels[pixel_index + 2]); -+ CHECK_CLOSE(20, (int)pixels[pixel_index], 5); -+ CHECK_CLOSE(18, (int)pixels[pixel_index + 1], 2); -+ CHECK_CLOSE(11, (int)pixels[pixel_index + 2], 2); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); - } --#endif -\ No newline at end of file -+#endif -diff --git a/tests/Timeline_Tests.cpp b/tests/Timeline_Tests.cpp -index 8c81579..76195e8 100644 ---- a/tests/Timeline_Tests.cpp -+++ b/tests/Timeline_Tests.cpp -@@ -119,8 +119,8 @@ TEST(Timeline_Check_Two_Track_Video) - int pixel_index = 230 * 4; // pixel 230 (4 bytes per pixel) - - // Check image properties -- CHECK_EQUAL(21, (int)f->GetPixels(pixel_row)[pixel_index]); -- CHECK_EQUAL(191, (int)f->GetPixels(pixel_row)[pixel_index + 1]); -+ CHECK_CLOSE(21, (int)f->GetPixels(pixel_row)[pixel_index], 2); -+ CHECK_CLOSE(191, (int)f->GetPixels(pixel_row)[pixel_index + 1], 2); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); - -@@ -128,7 +128,7 @@ TEST(Timeline_Check_Two_Track_Video) - f = t.GetFrame(2); - - // Check image properties -- CHECK_EQUAL(176, (int)f->GetPixels(pixel_row)[pixel_index]); -+ CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 1); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); -@@ -138,7 +138,7 @@ TEST(Timeline_Check_Two_Track_Video) - - // Check image properties - CHECK_EQUAL(23, (int)f->GetPixels(pixel_row)[pixel_index]); -- CHECK_EQUAL(190, (int)f->GetPixels(pixel_row)[pixel_index + 1]); -+ CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 2); - CHECK_EQUAL(0, 
(int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); - -@@ -147,7 +147,7 @@ TEST(Timeline_Check_Two_Track_Video) - - // Check image properties - CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index]); -- CHECK_EQUAL(106, (int)f->GetPixels(pixel_row)[pixel_index + 1]); -+ CHECK_CLOSE(106, (int)f->GetPixels(pixel_row)[pixel_index + 1], 1); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); - -@@ -156,7 +156,7 @@ TEST(Timeline_Check_Two_Track_Video) - - // Check image properties - CHECK_EQUAL(23, (int)f->GetPixels(pixel_row)[pixel_index]); -- CHECK_EQUAL(190, (int)f->GetPixels(pixel_row)[pixel_index + 1]); -+ CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 2); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); - -@@ -165,15 +165,15 @@ TEST(Timeline_Check_Two_Track_Video) - - // Check image properties - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index]); -- CHECK_EQUAL(94, (int)f->GetPixels(pixel_row)[pixel_index + 1]); -- CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); -+ CHECK_CLOSE(94, (int)f->GetPixels(pixel_row)[pixel_index + 1], 1); -+ CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 1); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); - - // Get frame - f = t.GetFrame(4); - - // Check image properties -- CHECK_EQUAL(176, (int)f->GetPixels(pixel_row)[pixel_index]); -+ CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 1); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); diff -Nru libopenshot-0.2.2+dfsg1/debian/patches/0004-Add-some-more-fault-tolerance-for-arm64.patch libopenshot-0.2.5+dfsg1/debian/patches/0004-Add-some-more-fault-tolerance-for-arm64.patch --- libopenshot-0.2.2+dfsg1/debian/patches/0004-Add-some-more-fault-tolerance-for-arm64.patch 2018-08-21 14:20:49.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/patches/0004-Add-some-more-fault-tolerance-for-arm64.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -From: "Dr. 
Tobias Quathamer" -Date: Sat, 18 Nov 2017 14:49:53 +0100 -Subject: Add some more fault tolerance for arm64 - ---- - tests/FFmpegReader_Tests.cpp | 4 ++-- - tests/Timeline_Tests.cpp | 10 +++++----- - 2 files changed, 7 insertions(+), 7 deletions(-) - -diff --git a/tests/FFmpegReader_Tests.cpp b/tests/FFmpegReader_Tests.cpp -index 7f7c960..07fc41e 100644 ---- a/tests/FFmpegReader_Tests.cpp -+++ b/tests/FFmpegReader_Tests.cpp -@@ -109,8 +109,8 @@ TEST(FFmpegReader_Check_Video_File) - - // Check image properties on scanline 10, pixel 112 - CHECK_EQUAL(0, (int)pixels[pixel_index]); -- CHECK_EQUAL(96, (int)pixels[pixel_index + 1]); -- CHECK_EQUAL(188, (int)pixels[pixel_index + 2]); -+ CHECK_CLOSE(96, (int)pixels[pixel_index + 1], 1); -+ CHECK_CLOSE(188, (int)pixels[pixel_index + 2], 1); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); - - // Close reader -diff --git a/tests/Timeline_Tests.cpp b/tests/Timeline_Tests.cpp -index 76195e8..4d861a6 100644 ---- a/tests/Timeline_Tests.cpp -+++ b/tests/Timeline_Tests.cpp -@@ -130,14 +130,14 @@ TEST(Timeline_Check_Two_Track_Video) - // Check image properties - CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 1); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 1]); -- CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); -+ CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 1); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); - - // Get frame - f = t.GetFrame(3); - - // Check image properties -- CHECK_EQUAL(23, (int)f->GetPixels(pixel_row)[pixel_index]); -+ CHECK_CLOSE(23, (int)f->GetPixels(pixel_row)[pixel_index], 1); - CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 2); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); -@@ -146,7 +146,7 @@ TEST(Timeline_Check_Two_Track_Video) - f = t.GetFrame(24); - - // Check image properties -- CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index]); -+ CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index], 1); - CHECK_CLOSE(106, (int)f->GetPixels(pixel_row)[pixel_index + 1], 1); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); -@@ -155,7 +155,7 @@ TEST(Timeline_Check_Two_Track_Video) - f = t.GetFrame(5); - - // Check image properties -- CHECK_EQUAL(23, (int)f->GetPixels(pixel_row)[pixel_index]); -+ CHECK_CLOSE(23, (int)f->GetPixels(pixel_row)[pixel_index], 1); - CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 2); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); -@@ -175,7 +175,7 @@ TEST(Timeline_Check_Two_Track_Video) - // Check image properties - CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 1); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 1]); -- CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); -+ CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 1); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); - - // Close reader diff -Nru libopenshot-0.2.2+dfsg1/debian/patches/Disable-failing-tests.patch libopenshot-0.2.5+dfsg1/debian/patches/Disable-failing-tests.patch --- libopenshot-0.2.2+dfsg1/debian/patches/Disable-failing-tests.patch 2018-08-21 14:20:49.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/patches/Disable-failing-tests.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -From: 
Ghislain Antony Vaillant -Date: Fri, 16 Dec 2016 13:30:35 +0000 -Subject: Disable failing tests - -https://github.com/OpenShot/libopenshot/issues/43 ---- - tests/FFmpegWriter_Tests.cpp | 8 ++++---- - 1 file changed, 4 insertions(+), 4 deletions(-) - -diff --git a/tests/FFmpegWriter_Tests.cpp b/tests/FFmpegWriter_Tests.cpp -index 73357f2..bca786f 100644 ---- a/tests/FFmpegWriter_Tests.cpp -+++ b/tests/FFmpegWriter_Tests.cpp -@@ -73,8 +73,8 @@ TEST(FFmpegWriter_Test_Webm) - int pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) - - // Check image properties on scanline 10, pixel 112 -- CHECK_EQUAL(23, (int)pixels[pixel_index]); -- CHECK_EQUAL(23, (int)pixels[pixel_index + 1]); -- CHECK_EQUAL(23, (int)pixels[pixel_index + 2]); -- CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); -+ //CHECK_EQUAL(23, (int)pixels[pixel_index]); -+ //CHECK_EQUAL(23, (int)pixels[pixel_index + 1]); -+ //CHECK_EQUAL(23, (int)pixels[pixel_index + 2]); -+ //CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); - } diff -Nru libopenshot-0.2.2+dfsg1/debian/patches/python3.8.diff libopenshot-0.2.5+dfsg1/debian/patches/python3.8.diff --- libopenshot-0.2.2+dfsg1/debian/patches/python3.8.diff 2020-01-25 04:33:41.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/patches/python3.8.diff 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -Index: b/cmake/Modules/FindPythonLibs.cmake -=================================================================== ---- a/cmake/Modules/FindPythonLibs.cmake -+++ b/cmake/Modules/FindPythonLibs.cmake -@@ -55,7 +55,7 @@ CMAKE_FIND_FRAMEWORKS(Python) - - set(_PYTHON1_VERSIONS 1.6 1.5) - set(_PYTHON2_VERSIONS 2.7 2.6 2.5 2.4 2.3 2.2 2.1 2.0) --set(_PYTHON3_VERSIONS 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0) -+set(_PYTHON3_VERSIONS 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0) - - if(PythonLibs_FIND_VERSION) - if(PythonLibs_FIND_VERSION_COUNT GREATER 1) diff -Nru libopenshot-0.2.2+dfsg1/debian/patches/series libopenshot-0.2.5+dfsg1/debian/patches/series --- libopenshot-0.2.2+dfsg1/debian/patches/series 2020-01-25 04:33:41.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/patches/series 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -Use-system-libjsoncpp.patch -Disable-failing-tests.patch -0003-Fix-failing-tests-by-using-a-fault-tolerance.patch -0004-Add-some-more-fault-tolerance-for-arm64.patch -python3.8.diff diff -Nru libopenshot-0.2.2+dfsg1/debian/patches/Use-system-libjsoncpp.patch libopenshot-0.2.5+dfsg1/debian/patches/Use-system-libjsoncpp.patch --- libopenshot-0.2.2+dfsg1/debian/patches/Use-system-libjsoncpp.patch 2018-08-21 14:20:49.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/patches/Use-system-libjsoncpp.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -From: Ghislain Antony Vaillant -Date: Thu, 15 Dec 2016 21:04:34 +0000 -Subject: Use system libjsoncpp - ---- - tests/CMakeLists.txt | 8 +++++++- - 1 file changed, 7 insertions(+), 1 deletion(-) - -diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt -index 2c45550..60bb40b 100644 ---- a/tests/CMakeLists.txt -+++ b/tests/CMakeLists.txt -@@ -155,7 +155,13 @@ include_directories(${ZMQ_INCLUDE_DIRS}) - - ################### JSONCPP ##################### - # Include jsoncpp headers (needed for JSON parsing) --include_directories("../thirdparty/jsoncpp/include") -+if (USE_SYSTEM_JSONCPP) -+ find_package(JsonCpp REQUIRED) -+ include_directories(${JSONCPP_INCLUDE_DIRS}) -+else () -+ message("Using embedded JsonCpp") -+ include_directories("../thirdparty/jsoncpp/include") -+endif () - - IF (NOT DISABLE_TESTS) - ############### SET TEST 
SOURCE FILES ################# diff -Nru libopenshot-0.2.2+dfsg1/debian/python3-openshot.install libopenshot-0.2.5+dfsg1/debian/python3-openshot.install --- libopenshot-0.2.2+dfsg1/debian/python3-openshot.install 2018-08-20 21:52:54.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/python3-openshot.install 2018-09-22 20:41:04.000000000 +0000 @@ -1 +1 @@ -usr/local/lib/python*/dist-packages usr/lib/python3 +usr/lib/python*/dist-packages usr/lib/python3 diff -Nru libopenshot-0.2.2+dfsg1/debian/rules libopenshot-0.2.5+dfsg1/debian/rules --- libopenshot-0.2.2+dfsg1/debian/rules 2020-01-25 04:33:41.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/rules 2020-06-19 21:01:51.000000000 +0000 @@ -1,8 +1,5 @@ #!/usr/bin/make -f -export CC = gcc-8 -export CXX = g++-8 - include /usr/share/dpkg/architecture.mk # Uncomment this to turn on verbose mode. @@ -12,9 +9,10 @@ # Possible overlinkage. # see: https://github.com/OpenShot/libopenshot/issues/38 export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed +BUILDDIR = $(CURDIR)/debian/build %: - dh $@ --with python3 + dh $@ --with python3 --builddirectory=$(BUILDDIR) override_dh_auto_configure: dh_auto_configure -- -DLIB_SUFFIX="/$(DEB_HOST_MULTIARCH)" \ @@ -27,8 +25,6 @@ override_dh_auto_test-indep: -override_dh_auto_test-arch: - override_dh_auto_install-indep: override_dh_installdocs-indep: diff -Nru libopenshot-0.2.2+dfsg1/debian/upstream/metadata libopenshot-0.2.5+dfsg1/debian/upstream/metadata --- libopenshot-0.2.2+dfsg1/debian/upstream/metadata 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/debian/upstream/metadata 2018-09-22 20:41:04.000000000 +0000 @@ -0,0 +1,5 @@ +--- +Bug-Database: https://github.com/OpenShot/libopenshot/issues +Bug-Submit: https://github.com/OpenShot/libopenshot/issues/new +Repository: https://github.com/OpenShot/libopenshot.git +Repository-Browse: https://github.com/OpenShot/libopenshot diff -Nru libopenshot-0.2.2+dfsg1/doc/HW-ACCEL.md libopenshot-0.2.5+dfsg1/doc/HW-ACCEL.md --- libopenshot-0.2.2+dfsg1/doc/HW-ACCEL.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/doc/HW-ACCEL.md 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,136 @@ +## Hardware Acceleration + +OpenShot now has experimental support for hardware acceleration, which uses 1 (or more) +graphics cards to offload some of the work for both decoding and encoding. This is +very new and experimental (as of May 2019), but we look forward to "accelerating" +our support for this in the future! + +The following table summarizes our current level of support: + +| | Linux Decode | Linux Encode | Mac Decode | Mac Encode | Windows Decode | Windows Encode | Notes | +|--------------------|:---------------:|:--------------:|:----------:|:--------------:|:--------------:|:--------------:|------------------| +| VA-API | ✔️   | ✔️   | - | - | - | - | *Linux Only* | +| VDPAU | ✔️ 1 | ✅ 2 | - | - | - | - | *Linux Only* | +| CUDA (NVDEC/NVENC) | ❌ 3 | ✔️   | - | - | - | ✔️   | *Cross Platform* | +| VideoToolBox | - | - | ✔️   | ❌ 4 | - | - | *Mac Only* | +| DXVA2 | - | - | - | - | ❌ 3 | - | *Windows Only* | +| D3D11VA | - | - | - | - | ❌ 3 | - | *Windows Only* | +| QSV | ❌ 3 | ❌   | ❌   | ❌   | ❌   | ❌   | *Cross Platform* | + +#### Notes + +1. VDPAU for some reason needs a card number one higher than it really is +2. VDPAU is a decoder only +3. Green frames (pixel data not correctly tranferred back to system memory) +4. 
Crashes and burns
+
+## Supported FFmpeg Versions
+
+* HW accel is supported from FFmpeg version 3.4
+* HW accel was removed for nVidia drivers in Ubuntu for FFmpeg 4+
+
+**Notice:** The FFmpeg versions of Ubuntu and PPAs for Ubuntu show the
+same behaviour. FFmpeg 3 has working nVidia hardware acceleration, while
+FFmpeg 4+ has no support for nVidia hardware acceleration
+included.
+
+## OpenShot Settings
+
+The following settings are used by libopenshot to enable, disable, and control
+the various hardware acceleration features.
+
+```{cpp}
+/// Use video codec for faster video decoding (if supported)
+int HARDWARE_DECODER = 0;
+
+/* 0 - No acceleration
+ 1 - Linux VA-API
+ 2 - nVidia NVDEC
+ 3 - Windows D3D9
+ 4 - Windows D3D11
+ 5 - MacOS / VideoToolBox
+ 6 - Linux VDPAU
+ 7 - Intel QSV */
+
+/// Number of threads of OpenMP
+int OMP_THREADS = 12;
+
+/// Number of threads that FFmpeg uses
+int FF_THREADS = 8;
+
+/// Maximum rows that hardware decode can handle
+int DE_LIMIT_HEIGHT_MAX = 1100;
+
+/// Maximum columns that hardware decode can handle
+int DE_LIMIT_WIDTH_MAX = 1950;
+
+/// Which GPU to use to decode (0 is the first, LINUX ONLY)
+int HW_DE_DEVICE_SET = 0;
+
+/// Which GPU to use to encode (0 is the first, LINUX ONLY)
+int HW_EN_DEVICE_SET = 0;
+```
+
+## Libva / VA-API (Video Acceleration API)
+
+The correct version of libva is needed (libva in Ubuntu 16.04 or libva2
+in Ubuntu 18.04) for the AppImage to work with hardware acceleration.
+An AppImage that works on both systems (supporting libva and libva2)
+might be possible when no libva is included in the AppImage.
+
+* vaapi is working for intel and AMD
+* vaapi is working for decode only for nouveau
+* nVidia driver is working for export only
+
+## AMD Graphics Cards (RadeonOpenCompute/ROCm)
+
+Decoding and encoding on the (AMD) GPU is possible with the default drivers.
+On systems where ROCm is installed and running, GPU acceleration of effects
+could be implemented in the future (contributions welcome).
+
+## Multiple Graphics Cards
+
+If the computer has multiple graphics cards installed, you can choose which
+should be used by libopenshot. Also, you can optionally use one card for
+decoding and the other for encoding (if both cards support acceleration).
+This is currently only supported on Linux, due to the device name FFmpeg
+expects (i.e. **/dev/dri/render128**). Contributions welcome if anyone can
+determine what string format to pass for Windows and Mac.
+
+## Help Us Improve Hardware Support
+
+This information might be wrong, and we would love to continue improving
+our support for hardware acceleration in OpenShot. Please help us update
+this document if you find an error or discover new and/or useful information.
+
+**FFmpeg 4 + nVidia** The manual at:
+https://www.tal.org/tutorials/ffmpeg_nvidia_encode
+works pretty well. We could compile and install a version of FFmpeg 4.1.3
+on Mint 19.1 that supports the GPU on nVidia cards. A version of OpenShot
+with hardware support using these libraries could use the nVidia GPU.
+
+**BUG:** Hardware-supported decoding still has some bugs (as you can see from
+the chart above). Also, the speed gains with decoding are not as great
+as with encoding. Currently, if hardware decoding fails, there is no
+fallback (you either get green frames or an "invalid file" error in OpenShot).
+This needs to be improved to successfully fall back to software decoding.
+
+**Needed:**
+ * A way to get options and limits of the GPU, such as
+ supported dimensions (width and height).
+ * A way to list the actual Graphic Cards available to FFmpeg (for the + user to choose which card for decoding and encoding, as opposed + to "Graphics Card X") + +**Further improvement:** Right now the frame can be decoded on the GPU, but the +frame is then copied to CPU memory for modifications. It is then copied back to +GPU memory for encoding. Using the GPU for both decoding and modifications +will make it possible to do away with these two copies. A possible solution would +be to use Vulkan compute which would be available on Linux and Windows natively +and on MacOS via MoltenVK. + +## Credit + +A big thanks to Peter M (https://github.com/eisneinechse) for all his work +on integrating hardware acceleration into libopenshot! The community thanks +you for this major contribution! diff -Nru libopenshot-0.2.2+dfsg1/doc/INSTALL-LINUX.md libopenshot-0.2.5+dfsg1/doc/INSTALL-LINUX.md --- libopenshot-0.2.2+dfsg1/doc/INSTALL-LINUX.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/doc/INSTALL-LINUX.md 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,227 @@ +# Building libopenshot for Linux + +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for +dependencies, locates header files and libraries, generates makefiles, and supports the cross-platform +compiling of libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where +all temporary build files, such as makefiles, object files, and even the final binaries, are created +outside of the source code folder, inside a /build/ sub-folder. This prevents the build process +from cluttering up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. + +### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. 
It is based on the JUCE project, which is an outstanding audio library used by many different applications + +### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is +compiled. Some of these flags might be required when compiling on certain OSes, just depending +on how your build environment is setup. To add a build flag, follow this general syntax: +`cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code is +available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command +to obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. 
+ +### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +### include/ + * This folder contains all headers (*.h) used by libopenshot. + +### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +### tests/ + * This folder contains all unit test code. Each class has its own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Install Dependencies + +In order to actually compile libopenshot, we need to install some dependencies on your system. The easiest +way to accomplish this is with our Daily PPA. A PPA is an unofficial Ubuntu repository, which has our +software packages available to download and install. + +``` + sudo add-apt-repository ppa:openshot.developers/libopenshot-daily + sudo apt-get update + sudo apt-get install openshot-qt \ + cmake \ + libx11-dev \ + libasound2-dev \ + libavcodec-dev \ + libavdevice-dev \ + libavfilter-dev \ + libavformat-dev \ + libavresample-dev \ + libavutil-dev \ + libfdk-aac-dev \ + libfreetype6-dev \ + libjsoncpp-dev \ + libmagick++-dev \ + libopenshot-audio-dev \ + libswscale-dev \ + libunittest++-dev \ + libxcursor-dev \ + libxinerama-dev \ + libxrandr-dev \ + libzmq3-dev \ + pkg-config \ + python3-dev \ + qtbase5-dev \ + qtmultimedia5-dev \ + swig +``` + +## Linux Build Instructions (libopenshot-audio) +To compile libopenshot-audio, we need to go through a few additional steps to manually build and +install it. Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake ../ +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Linux Build Instructions (libopenshot) +Run the following commands to compile libopenshot: + +``` +cd [libopenshot repo directory] +mkdir -p build +cd build +cmake ../ +make +``` + +If you are missing any dependencies for libopenshot, you might receive error messages at this point. +Just install the missing packages (usually with a -dev suffix), and run the above commands again. +Repeat until no error messages are displayed and the build process completes. Also, if you manually +install Qt 5, you might need to specify the location for cmake: + +``` +cmake -DCMAKE_PREFIX_PATH=/qt5_path/qt5/5.2.0/ ../ +``` + +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +make test +``` + +To auto-generate documentation for libopenshot, launch a terminal, and enter: + +``` +make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods documented. The +folder is located at **build/doc/html/**. Once libopenshot has been successfully built, we need to +install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +make install +``` + +This will copy the binary files to /usr/local/lib/, and the header files to /usr/local/include/openshot/...
+This is where other projects will look for the libopenshot files when building. Python 3 bindings are +also installed at this point. Let's verify the Python bindings work: + +``` +python3 +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on your system. +Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! diff -Nru libopenshot-0.2.2+dfsg1/doc/INSTALL-MAC.md libopenshot-0.2.5+dfsg1/doc/INSTALL-MAC.md --- libopenshot-0.2.2+dfsg1/doc/INSTALL-MAC.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/doc/INSTALL-MAC.md 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,220 @@ +# Building libopenshot for MacOS + +## Getting Started + +The best way to get started with libopenshot is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind that your computer is likely different +from the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for +dependencies, locates header files and libraries, generates makefiles, and supports the cross-platform +compiling of libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where +all temporary build files, such as makefiles, object files, and even the final binaries, are created +outside of the source code folder, inside a /build/ sub-folder. This prevents the build process +from cluttering up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. + +### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications. + +### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc...
+ +### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is compiled. +Some of these flags might be required when compiling on certain OSes, just depending on how your build +environment is setup. To add a build flag, follow this general syntax: +`cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code +is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to +obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. 
+ +### include/ + * This folder contains all headers (*.h) used by libopenshot. + +### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +### tests/ + * This folder contains all unit test code. Each class has its own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Install Dependencies + +In order to actually compile libopenshot and libopenshot-audio, we need to install some dependencies on +your system. Most packages needed by libopenshot can be installed easily with Homebrew. However, first +install Xcode with the following options ("UNIX Development", "System Tools", "Command Line Tools", or +"Command Line Support"). Be sure to refresh your list of Homebrew packages with the `brew update` command. + +**NOTE:** Homebrew seems to work much better for most users (compared to MacPorts), so I am going to +focus on brew for this guide. + +Install the following packages using the Homebrew package installer (http://brew.sh/). Pay close attention +to any warnings or errors during these brew installs. NOTE: You might have some conflicting libraries in +your /usr/local/ folders, so follow the directions from brew if these are detected. + +``` +brew install gcc48 --enable-all-languages +brew install ffmpeg +brew install librsvg +brew install swig +brew install doxygen +brew install unittest-cpp --cc=gcc-4.8 (You must specify a C++ compiler of 4.7 or 4.8 with the --cc flag) +brew install qt5 +brew install cmake +brew install zeromq +``` + +## Mac Build Instructions (libopenshot-audio) +Since libopenshot-audio is not available in a Homebrew or MacPorts package, we need to go through a +few additional steps to manually build and install it. Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake -d -G "Unix Makefiles" -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang ../ (Clang must be used due to GNU-incompatible Objective-C code in some of the Apple frameworks) +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Mac Build Instructions (libopenshot) +Run the following commands to build libopenshot: + +``` +$ cd [libopenshot repo folder] +$ mkdir build +$ cd build +$ cmake -G "Unix Makefiles" -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.4.2/ -DPYTHON_INCLUDE_DIR=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/include/python3.3m/ -DPYTHON_LIBRARY=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/lib/libpython3.3.dylib -DPython_FRAMEWORKS=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/ ../ -D"CMAKE_BUILD_TYPE:STRING=Debug" +``` + +The extra arguments on the cmake command make sure the compiler will be gcc 4.8 and that cmake +knows where to look for the Qt header files and Python library. Double check these file paths, +as yours will likely be different. + +``` +make +``` + +If you are missing any dependencies for libopenshot, you will receive error messages at this point. +Just install the missing dependencies, and run the above commands again. Repeat until no error +messages are displayed and the build process completes.
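+ +If the failing dependency is one that Homebrew installs outside the default search path (qt5 is a common example), pointing CMake at it explicitly usually resolves the error. The command below is only a sketch: the prefix comes from `brew --prefix qt5`, will differ between machines, and should be combined with the compiler and Python flags shown above. + +``` +cmake -G "Unix Makefiles" -DCMAKE_PREFIX_PATH=$(brew --prefix qt5) ../ +```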
+ +Also, if you are having trouble building, please see the CMake Flags section above, as it might +provide a solution for finding a missing folder path, missing Python 3 library, etc... + +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +make test +``` + +To auto-generate the documentation for libopenshot, launch a terminal, and enter: + +``` +make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods documented. +The folder is located at build/doc/html/. Once libopenshot has been successfully built, we need +to install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +make install +``` + +This should copy the binary files to /usr/local/lib/, and the header files to /usr/local/include/openshot/... +This is where other projects will look for the libopenshot files when building. Python 3 bindings are +also installed at this point. Let's verify the Python bindings work: + +``` +python3 (or python) +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on your +system. Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! diff -Nru libopenshot-0.2.2+dfsg1/doc/INSTALL-WINDOWS.md libopenshot-0.2.5+dfsg1/doc/INSTALL-WINDOWS.md --- libopenshot-0.2.2+dfsg1/doc/INSTALL-WINDOWS.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/doc/INSTALL-WINDOWS.md 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,331 @@ +# Building libopenshot for Windows + +## Getting Started + +The best way to get started with libopenshot is to learn about our build system, obtain all the +source code, install a development IDE and tools, and better understand our dependencies. So, +please read through the following sections, and follow the instructions. And keep in mind +that your computer is likely different from the one used when writing these instructions. +Your file paths and versions of applications might be slightly different, so keep an eye out +for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which +checks for dependencies, locates header files and libraries, generates makefiles, and +supports the cross-platform compiling of libopenshot and libopenshot-audio. CMake uses +an out-of-source build concept, where all temporary build files, such as makefiles, +object files, and even the final binaries, are created outside of the source code +folder, inside a /build/ sub-folder. This prevents the build process from cluttering +up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to +install these dependencies vary for each operating system. Libraries and Executables +have been labeled in the list below to help distinguish between them. + +### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes.
+ +### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot +is compiled. Some of these flags might be required when compiling on certain OSes, just +depending on how your build environment is setup. To add a build flag, follow this general +syntax: `cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Environment Variables + +Many environment variables will need to be set during this Windows installation guide. +The command line will need to be closed and re-launched after any changes to your environment +variables. 
Also, dependency libraries will not be found during linking or execution unless their folders are +included in the PATH environment variable. So, if you get errors related to missing +commands or libraries, double check the PATH variable. + +The following environment variables need to be added to your “System Variables”. Be sure to +check each folder path for accuracy, as your paths will likely be different from this list. + +### Example Variables + +* DL_DIR (`C:\libdl`) +* DXSDK_DIR (`C:\Program Files\Microsoft DirectX SDK (June 2010)\`) +* FFMPEGDIR (`C:\ffmpeg-git-95f163b-win32-dev`) +* FREETYPE_DIR (`C:\Program Files\GnuWin32`) +* HOME (`C:\msys\1.0\home`) +* LIBOPENSHOT_AUDIO_DIR (`C:\Program Files\libopenshot-audio`) +* QTDIR (`C:\qt5`) +* SNDFILE_DIR (`C:\Program Files\libsndfile`) +* UNITTEST_DIR (`C:\UnitTest++`) +* ZMQDIR (`C:\msys2\usr\local\`) +* PATH (`The following paths are an example`) + * `C:\Qt5\bin; C:\Qt5\MinGW\bin\; C:\msys\1.0\local\lib; C:\Program Files\CMake 2.8\bin; C:\UnitTest++\build; C:\libopenshot\build\src; C:\Program Files\doxygen\bin; C:\ffmpeg-git-95f163b-win32-dev\lib; C:\swigwin-2.0.4; C:\Python33; C:\Program Files\Project\lib; C:\msys2\usr\local\` + + + + + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code +is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to +obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary + build files, such as makefiles, as well as the final binaries (library and test executables). + +### cmake/ + * This folder contains custom modules not included by default in cmake, used to find + dependency libraries and headers and determine if these libraries are installed. + +### doc/ + * This folder contains documentation and related files, such as logos and images + required by the doxygen auto-generated documentation. + +### include/ + * This folder contains all headers (*.h) used by libopenshot. + +### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +### tests/ + * This folder contains all unit test code. Each class has its own test file (*.cpp), and + uses UnitTest++ macros to keep the test code simple and manageable. + +### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an + open-source JSON parser. + +## Install MSYS2 Dependencies + +Most Windows dependencies needed for libopenshot-audio, libopenshot, and openshot-qt +can be installed easily with MSYS2 and the pacman package manager. Follow these +directions to set up a Windows build environment for OpenShot.
+ +1) Install MSYS2: http://www.msys2.org/ + +2) Run MSYS2 command prompt (for example: `C:\msys64\msys2_shell.cmd`) + +3) Append PATH (so MSYS2 can find executables and libraries): + +``` +PATH=$PATH:/c/msys64/mingw64/bin:/c/msys64/mingw64/lib (64-bit PATH) + or +PATH=$PATH:/c/msys32/mingw32/bin:/c/msys32/mingw32/lib (32-bit PATH) +``` + +4) Update and upgrade all packages + +``` +pacman -Syu +``` + +5a) Install the following packages (**64-Bit**) + +``` +pacman -S --needed base-devel mingw-w64-x86_64-toolchain +pacman -S mingw64/mingw-w64-x86_64-ffmpeg +pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5 +pacman -S mingw64/mingw-w64-x86_64-swig +pacman -S mingw64/mingw-w64-x86_64-cmake +pacman -S mingw64/mingw-w64-x86_64-doxygen +pacman -S mingw64/mingw-w64-x86_64-python3-pip +pacman -S mingw32/mingw-w64-i686-zeromq +pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq +pacman -S mingw64/mingw-w64-x86_64-python3-cx_Freeze +pacman -S git + +# Install ImageMagick if needed (OPTIONAL and NOT NEEDED) +pacman -S mingw64/mingw-w64-x86_64-imagemagick +``` + +5b) **Or** Install the following packages (**32-Bit**) + +``` +pacman -S --needed base-devel mingw32/mingw-w64-i686-toolchain +pacman -S mingw32/mingw-w64-i686-ffmpeg +pacman -S mingw32/mingw-w64-i686-python3-pyqt5 +pacman -S mingw32/mingw-w64-i686-swig +pacman -S mingw32/mingw-w64-i686-cmake +pacman -S mingw32/mingw-w64-i686-doxygen +pacman -S mingw32/mingw-w64-i686-python3-pip +pacman -S mingw32/mingw-w64-i686-zeromq +pacman -S mingw32/mingw-w64-i686-python3-pyzmq +pacman -S mingw32/mingw-w64-i686-python3-cx_Freeze +pacman -S git + +# Install ImageMagick if needed (OPTIONAL and NOT NEEDED) +pacman -S mingw32/mingw-w64-i686-imagemagick +``` + +6) Install Python PIP Dependencies + +``` +pip3 install httplib2 +pip3 install slacker +pip3 install tinys3 +pip3 install github3.py +pip3 install requests +``` + +7) Download Unittest++ (https://github.com/unittest-cpp/unittest-cpp) into /MSYS2/[USER]/unittest-cpp-master/ + +``` +cmake -G "MSYS Makefiles" ../ -DCMAKE_MAKE_PROGRAM=mingw32-make -DCMAKE_INSTALL_PREFIX:PATH=/usr +mingw32-make install +``` + +8) ZMQ++ Header (This might not be needed anymore) + NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder + +## Manual Dependencies + +### DLfcn + * https://github.com/dlfcn-win32/dlfcn-win32 + * Download and Extract the Win32 Static (.tar.bz2) archive to a local folder: `C:\libdl\` + * Create an environment variable called DL_DIR and set the value to `C:\libdl\`. This environment variable will be used by CMake to find the binary and header file. + +### DirectX SDK / Windows SDK + * Windows 7: (DirectX SDK) http://www.microsoft.com/download/en/details.aspx?displaylang=en&id=6812 + * Windows 8: (Windows SDK) + * https://msdn.microsoft.com/en-us/windows/desktop/aa904949 + * Download and Install the SDK Setup program. This is needed for the JUCE library to play audio on Windows. + * Create an environment variable called DXSDK_DIR and set the value to `C:\Program Files\Microsoft DirectX SDK (June 2010)\` (your path might be different). This environment variable will be used by CMake to find the binaries and header files. + +### libSndFile + * http://www.mega-nerd.com/libsndfile/#Download + * Download and Install the Win32 Setup program. + * Create an environment variable called SNDFILE_DIR and set the value to `C:\Program Files\libsndfile`. This environment variable will be used by CMake to find the binary and header files.
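+ +Each of these manual dependencies asks you to create an environment variable. As a sketch of one way to do that without opening the System Properties dialog, the `setx` command creates a persistent user-level variable from a regular command prompt (the value below is just the libSndFile example from above; re-launch the command prompt afterwards so the new variable is picked up): + +``` +setx SNDFILE_DIR "C:\Program Files\libsndfile" +```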
+ +### libzmq + * http://zeromq.org/intro:get-the-software + * Download source code (zip) + * Follow their instructions, and build with mingw + * Create an environment variable called ZMQDIR and set the value to `C:\libzmq\build\` (the location of the compiled version). This environment variable will be used by CMake to find the binary and header files. + +## Windows Build Instructions (libopenshot-audio) +In order to compile libopenshot-audio, launch a command prompt and enter the following commands. This does not require the MSYS2 prompt, but it should work in both the Windows command prompt and the MSYS2 prompt. + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake -G "MinGW Makefiles" ../ +mingw32-make +mingw32-make install +openshot-audio-test-sound (This should play a test sound) +``` + +## Windows Build Instructions (libopenshot) +Run the following commands to build libopenshot: + +``` +cd [libopenshot repo folder] +mkdir build +cd build +cmake -G "MinGW Makefiles" -DPYTHON_INCLUDE_DIR="C:/Python34/include/" -DPYTHON_LIBRARY="C:/Python34/libs/libpython34.a" ../ +mingw32-make +``` + +If you are missing any dependencies for libopenshot, you will receive error messages at this point. +Just install the missing dependencies, and run the above commands again. Repeat until no error +messages are displayed and the build process completes. + +Also, if you are having trouble building, please see the CMake Flags section above, as +it might provide a solution for finding a missing folder path, missing Python 3 library, etc... + +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +mingw32-make test +``` + +To auto-generate the documentation for libopenshot, launch a terminal, and enter: + +``` +mingw32-make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods +documented. The folder is located at build/doc/html/. Once libopenshot has been successfully +built, we need to install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +mingw32-make install +``` + +This should copy the binary files to `C:\Program Files\openshot\lib\`, and the header +files to `C:\Program Files\openshot\include\...` This is where other projects will +look for the libopenshot files when building. Python 3 bindings are also installed +at this point. Let's verify the Python bindings work: + +``` +python3 +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on +your system. Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! diff -Nru libopenshot-0.2.2+dfsg1/Doxyfile.in libopenshot-0.2.5+dfsg1/Doxyfile.in --- libopenshot-0.2.2+dfsg1/Doxyfile.in 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/Doxyfile.in 2020-03-03 08:00:06.000000000 +0000 @@ -1,4 +1,4 @@ -# Doxyfile 1.8.6 +# Doxyfile 1.8.15 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. @@ -17,11 +17,11 @@ # Project related configuration options #--------------------------------------------------------------------------- -# This tag specifies the encoding used for all characters in the config file -# that follow.
The default is UTF-8 which is also the encoding used for all text -# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv -# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv -# for the list of possible encodings. +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 @@ -46,10 +46,10 @@ PROJECT_BRIEF = -# With the PROJECT_LOGO tag one can specify an logo or icon that is included in -# the documentation. The maximum height of the logo should not exceed 55 pixels -# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo -# to the output directory. +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. PROJECT_LOGO = @@ -60,7 +60,7 @@ OUTPUT_DIRECTORY = "@DOXYFILE_OUTPUT_DIR@" -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where @@ -70,6 +70,14 @@ CREATE_SUBDIRS = NO +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. @@ -85,14 +93,22 @@ OUTPUT_LANGUAGE = English -# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member +# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all generated output in the proper direction. +# Possible values are: None, LTR, RTL and Context. +# The default value is: None. + +OUTPUT_TEXT_DIRECTION = None + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. 
BRIEF_MEMBER_DESC = YES -# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the @@ -127,7 +143,7 @@ INLINE_INHERITED_MEMB = NO -# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. @@ -197,9 +213,9 @@ INHERIT_DOCS = YES -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a -# new page for each member. If set to NO, the documentation of a member will be -# part of the file/class/namespace that contains it. +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO @@ -218,7 +234,12 @@ # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". You can put \n's in the value part of an alias to insert -# newlines. +# newlines (in the resulting output). You can put ^^ in the value part of an +# alias to insert a newline as if a physical newline was in the original file. +# When you need a literal { or } or , in the value part of an alias you have to +# escape them by means of a backslash (\), this can lead to conflicts with the +# commands \{ and \} for these it is advised to use the version @{ and @} or use +# a double escape (\\{ and \\}) ALIASES = @@ -256,16 +277,28 @@ OPTIMIZE_OUTPUT_VHDL = NO +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make -# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C -# (default is Fortran), use: inc=Fortran f=C. +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, +# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is +# Fortran), use: inc=Fortran f=C. # -# Note For files without extension you can use no_extension as a placeholder. 
+# Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. @@ -274,7 +307,7 @@ # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable -# documentation. See http://daringfireball.net/projects/markdown/ for details. +# documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. @@ -282,10 +315,19 @@ MARKDOWN_SUPPORT = YES +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 0. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +TOC_INCLUDE_HEADINGS = 0 + # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can -# be prevented in individual cases by by putting a % sign in front of the word -# or globally by setting AUTOLINK_SUPPORT to NO. +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES @@ -298,7 +340,7 @@ # diagrams that involve STL classes more complete and accurate. # The default value is: NO. -BUILTIN_STL_SUPPORT = NO +BUILTIN_STL_SUPPORT = YES # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. @@ -307,7 +349,7 @@ CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: -# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. @@ -325,13 +367,20 @@ IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first +# tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). 
Set it to NO to prevent @@ -390,7 +439,7 @@ # Build related configuration options #--------------------------------------------------------------------------- -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. @@ -400,35 +449,35 @@ EXTRACT_ALL = YES -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO -# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO -# If the EXTRACT_STATIC tag is set to YES all static members of a file will be +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined -# locally in source files will be included in the documentation. If set to NO +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES -# This flag is only useful for Objective-C code. When set to YES local methods, +# This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are -# included in the documentation. If set to NO only methods in the interface are +# included in the documentation. If set to NO, only methods in the interface are # included. # The default value is: NO. @@ -453,21 +502,21 @@ # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set -# to NO these classes will be included in the various overviews. This option has -# no effect if EXTRACT_ALL is enabled. +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend -# (class|struct|union) declarations. If set to NO these declarations will be +# (class|struct|union) declarations. If set to NO, these declarations will be # included in the documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any -# documentation blocks found inside the body of a function. If set to NO these +# documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. @@ -481,7 +530,7 @@ INTERNAL_DOCS = NO # If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file -# names in lower-case letters. 
If set to YES upper-case letters are also +# names in lower-case letters. If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # and Mac users are advised to set this option to NO. @@ -490,12 +539,19 @@ CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with -# their full class and namespace scopes in the documentation. If set to YES the +# their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. + +HIDE_COMPOUND_REFERENCE= NO + # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. @@ -523,14 +579,14 @@ # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member -# name. If set to NO the members will appear in declaration order. +# name. If set to NO, the members will appear in declaration order. # The default value is: YES. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member -# name. If set to NO the members will appear in declaration order. Note that +# name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. @@ -575,27 +631,25 @@ STRICT_PROTO_MATCHING = NO -# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the -# todo list. This list is created by putting \todo commands in the -# documentation. +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = YES -# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the -# test list. This list is created by putting \test commands in the -# documentation. +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = YES -# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES -# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. @@ -620,8 +674,8 @@ MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at -# the bottom of the documentation of classes and structs. 
If set to YES the list -# will mention the files that were used to generate the documentation. +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES @@ -666,11 +720,10 @@ # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. The .bib # extension is automatically appended if omitted. This requires the bibtex tool -# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the -# search path. Do not use file names with spaces, bibtex cannot handle them. See -# also \cite for info how to create references. +# search path. See also \cite for info how to create references. CITE_BIB_FILES = @@ -686,7 +739,7 @@ QUIET = YES # The WARNINGS tag can be used to turn on/off the warning messages that are -# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. @@ -694,7 +747,7 @@ WARNINGS = YES -# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. @@ -711,12 +764,19 @@ # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return -# value. If set to NO doxygen will only warn about wrong or incomplete parameter -# documentation, but not about the absence of documentation. +# value. If set to NO, doxygen will only warn about wrong or incomplete +# parameter documentation, but not about the absence of documentation. If +# EXTRACT_ALL is set to YES then this flag will automatically be disabled. # The default value is: NO. WARN_NO_PARAMDOC = NO +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. +# The default value is: NO. + +WARN_AS_ERROR = NO + # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated @@ -740,16 +800,18 @@ # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with -# spaces. +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = "@CMAKE_CURRENT_SOURCE_DIR@/include" \ - "@CMAKE_CURRENT_SOURCE_DIR@/src" + "@CMAKE_CURRENT_SOURCE_DIR@/src" \ + "@CMAKE_CURRENT_SOURCE_DIR@/doc" + # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. 
See the libiconv -# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# documentation (see: https://www.gnu.org/software/libiconv/) for the list of # possible encodings. # The default value is: UTF-8. @@ -757,12 +819,17 @@ # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and -# *.h) to filter out the source-files in the directories. If left blank the -# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, -# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, -# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, -# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, -# *.qsf, *.as and *.js. +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, +# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, +# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = @@ -797,8 +864,9 @@ EXCLUDE_PATTERNS = "*/.*" \ "*/.*/*" \ - "*/src/Main.cpp*" \ - "*/src/Main_Blackmagic.cpp*" + "*/src/examples/*" \ + "*/src/bindings/*" \ + "*.py" # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the @@ -851,6 +919,10 @@ # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. INPUT_FILTER = @@ -860,11 +932,15 @@ # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER ) will also be used to filter the input files that are used for +# INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. @@ -912,10 +988,10 @@ STRIP_CODE_COMMENTS = NO # If the REFERENCED_BY_RELATION tag is set to YES then for each documented -# function all documented functions referencing it will be listed. +# entity all documented functions referencing it will be listed. # The default value is: NO. -REFERENCED_BY_RELATION = NO +REFERENCED_BY_RELATION = YES # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. 
@@ -924,7 +1000,7 @@ REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set -# to YES, then the hyperlinks from functions in REFERENCES_RELATION and +# to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. @@ -944,12 +1020,12 @@ # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system -# (see http://www.gnu.org/software/global/global.html). You will need version +# (see https://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global -# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # @@ -1001,7 +1077,7 @@ # Configuration options related to the HTML output #--------------------------------------------------------------------------- -# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES @@ -1063,13 +1139,15 @@ HTML_STYLESHEET = -# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user- -# defined cascading style sheet that is included after the standard style sheets +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the -# standard style sheet and is therefor more robust against future updates. -# Doxygen will copy the style sheet file to the output directory. For an example -# see the documentation. +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = @@ -1082,12 +1160,12 @@ # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_EXTRA_FILES = "doc/InstallationGuide.pdf" +HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen -# will adjust the colors in the stylesheet and background images according to +# will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a colorwheel, see -# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. 
@@ -1116,12 +1194,24 @@ # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. Setting this -# to NO can help when comparing the output of multiple runs. -# The default value is: YES. +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = YES +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via Javascript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. Disable this option to support browsers that do not have Javascript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_MENUS = YES + # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. @@ -1145,13 +1235,13 @@ # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development -# environment (see: http://developer.apple.com/tools/xcode/), introduced with -# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# environment (see: https://developer.apple.com/xcode/), introduced with OSX +# 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at -# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. @@ -1190,7 +1280,7 @@ # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop -# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output @@ -1213,28 +1303,29 @@ CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path -# including file name) of the HTML help compiler ( hhc.exe). If non-empty +# including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = -# The GENERATE_CHI flag controls if a separate .chi index file is generated ( -# YES) or that it should be included in the master .chm file ( NO). +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the master .chm file (NO). # The default value is: NO. 
# This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO -# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc) +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = -# The BINARY_TOC flag controls whether a binary table of contents is generated ( -# YES) or a normal table of contents ( NO) in the .chm file. +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. @@ -1265,7 +1356,7 @@ # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace -# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). +# (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1273,7 +1364,7 @@ # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual -# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- +# Folders (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1282,7 +1373,7 @@ # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1290,7 +1381,7 @@ # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1298,7 +1389,7 @@ # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: -# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). +# http://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = @@ -1347,7 +1438,7 @@ # index structure (just like the one that is generated for HTML Help). For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the -# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine-tune the look of the index. As an example, the default style # sheet generated by doxygen has an example that shows how to put an image at # the root of the tree instead of the PROJECT_NAME. 
Since the tree basically has @@ -1375,7 +1466,7 @@ TREEVIEW_WIDTH = 250 -# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. @@ -1391,7 +1482,7 @@ FORMULA_FONTSIZE = 10 -# Use the FORMULA_TRANPARENT tag to determine whether or not the images +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # @@ -1403,8 +1494,8 @@ FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# http://www.mathjax.org) which uses client side Javascript for the rendering -# instead of using prerendered bitmaps. Use this if you do not have LaTeX +# https://www.mathjax.org) which uses client side Javascript for the rendering +# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want to formulas look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. @@ -1430,8 +1521,8 @@ # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of -# MathJax from http://www.mathjax.org before deployment. -# The default value is: http://cdn.mathjax.org/mathjax/latest. +# MathJax from https://www.mathjax.org before deployment. +# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest @@ -1474,11 +1565,11 @@ # When the SERVER_BASED_SEARCH tag is enabled the search engine will be # implemented using a web server instead of a web client using Javascript. There -# are two flavours of web server based searching depending on the -# EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for -# searching and an index file used by the script. When EXTERNAL_SEARCH is -# enabled the indexing and searching needs to be provided by external tools. See -# the section "External Indexing and Searching" for details. +# are two flavors of web server based searching depending on the EXTERNAL_SEARCH +# setting. When disabled, doxygen will generate a PHP script for searching and +# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing +# and searching needs to be provided by external tools. See the section +# "External Indexing and Searching" for details. # The default value is: NO. # This tag requires that the tag SEARCHENGINE is set to YES. @@ -1490,9 +1581,9 @@ # external search engine pointed to by the SEARCHENGINE_URL option to obtain the # search results. # -# Doxygen ships with an example indexer ( doxyindexer) and search engine +# Doxygen ships with an example indexer (doxyindexer) and search engine # (doxysearch.cgi) which are based on the open source search engine library -# Xapian (see: http://xapian.org/). +# Xapian (see: https://xapian.org/). # # See the section "External Indexing and Searching" for details. # The default value is: NO. 
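The FORMULA_TRANSPARENT, USE_MATHJAX and MATHJAX_RELPATH settings touched above only come into play for comments that embed LaTeX through Doxygen's \f commands. A small hypothetical example of the markup they control (not taken from the libopenshot sources):

    /// Returns the frame number that covers time t.
    ///
    /// Uses \f$ n = \lfloor t \cdot fps \rfloor + 1 \f$; in the HTML output this
    /// formula is rendered either as a (transparent) PNG image or, when
    /// USE_MATHJAX is set to YES, client-side by MathJax.
    int64_t GetFrameNumber(double t, double fps);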
@@ -1503,9 +1594,9 @@ # The SEARCHENGINE_URL should point to a search engine hosted by a web server # which will return the search results when EXTERNAL_SEARCH is enabled. # -# Doxygen ships with an example indexer ( doxyindexer) and search engine +# Doxygen ships with an example indexer (doxyindexer) and search engine # (doxysearch.cgi) which are based on the open source search engine library -# Xapian (see: http://xapian.org/). See the section "External Indexing and +# Xapian (see: https://xapian.org/). See the section "External Indexing and # Searching" for details. # This tag requires that the tag SEARCHENGINE is set to YES. @@ -1541,7 +1632,7 @@ # Configuration options related to the LaTeX output #--------------------------------------------------------------------------- -# If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output. +# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output. # The default value is: YES. GENERATE_LATEX = @DOXYFILE_GENERATE_LATEX@ @@ -1557,22 +1648,35 @@ # The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be # invoked. # -# Note that when enabling USE_PDFLATEX this option is only used for generating -# bitmaps for formulas in the HTML output, but not in the Makefile that is -# written to the output directory. -# The default file is: latex. +# Note that when not enabling USE_PDFLATEX the default is latex when enabling +# USE_PDFLATEX the default is pdflatex and when in the later case latex is +# chosen this is overwritten by pdflatex. For specific output languages the +# default can have been set differently, this depends on the implementation of +# the output language. # This tag requires that the tag GENERATE_LATEX is set to YES. LATEX_CMD_NAME = "@LATEX_COMPILER@" # The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate # index for LaTeX. +# Note: This tag is used in the Makefile / make.bat. +# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file +# (.tex). # The default file is: makeindex. # This tag requires that the tag GENERATE_LATEX is set to YES. MAKEINDEX_CMD_NAME = "@MAKEINDEX_COMPILER@" -# If the COMPACT_LATEX tag is set to YES doxygen generates more compact LaTeX +# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to +# generate index for LaTeX. +# Note: This tag is used in the generated output file (.tex). +# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat. +# The default value is: \makeindex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_MAKEINDEX_CMD = \makeindex + +# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX # documents. This may be useful for small projects and may help to save some # trees in general. # The default value is: NO. @@ -1590,9 +1694,12 @@ PAPER_TYPE = a4wide # The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names -# that should be included in the LaTeX output. To get the times font for -# instance you can specify -# EXTRA_PACKAGES=times +# that should be included in the LaTeX output. The package can be specified just +# by its name or with the correct syntax as to be used with the LaTeX +# \usepackage command. To get the times font for instance you can specify : +# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times} +# To use the option intlimits with the amsmath package you can specify: +# EXTRA_PACKAGES=[intlimits]{amsmath} # If left blank no extra packages will be included. 
# This tag requires that the tag GENERATE_LATEX is set to YES. @@ -1606,23 +1713,36 @@ # # Note: Only use a user-defined header if you know what you are doing! The # following commands have a special meaning inside the header: $title, -# $datetime, $date, $doxygenversion, $projectname, $projectnumber. Doxygen will -# replace them by respectively the title of the page, the current date and time, -# only the current date, the version number of doxygen, the project name (see -# PROJECT_NAME), or the project number (see PROJECT_NUMBER). +# $datetime, $date, $doxygenversion, $projectname, $projectnumber, +# $projectbrief, $projectlogo. Doxygen will replace $title with the empty +# string, for the replacement values of the other commands the user is referred +# to HTML_HEADER. # This tag requires that the tag GENERATE_LATEX is set to YES. LATEX_HEADER = # The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the # generated LaTeX document. The footer should contain everything after the last -# chapter. If it is left blank doxygen will generate a standard footer. +# chapter. If it is left blank doxygen will generate a standard footer. See +# LATEX_HEADER for more information on how to generate a default footer and what +# special commands can be used inside the footer. # # Note: Only use a user-defined footer if you know what you are doing! # This tag requires that the tag GENERATE_LATEX is set to YES. LATEX_FOOTER = +# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# LaTeX style sheets that are included after the standard style sheets created +# by doxygen. Using this option one can overrule certain style aspects. Doxygen +# will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EXTRA_STYLESHEET = + # The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the LATEX_OUTPUT output # directory. Note that the files will be copied as-is; there are no commands or @@ -1640,8 +1760,8 @@ PDF_HYPERLINKS = YES -# If the LATEX_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate -# the PDF file directly from the LaTeX files. Set this option to YES to get a +# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate +# the PDF file directly from the LaTeX files. Set this option to YES, to get a # higher quality PDF documentation. # The default value is: YES. # This tag requires that the tag GENERATE_LATEX is set to YES. @@ -1676,17 +1796,33 @@ # The LATEX_BIB_STYLE tag can be used to specify the style to use for the # bibliography, e.g. plainnat, or ieeetr. See -# http://en.wikipedia.org/wiki/BibTeX and \cite for more info. +# https://en.wikipedia.org/wiki/BibTeX and \cite for more info. # The default value is: plain. # This tag requires that the tag GENERATE_LATEX is set to YES. LATEX_BIB_STYLE = plain +# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated +# page will contain the date and time when the page was generated. Setting this +# to NO can help when comparing the output of multiple runs. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. 
+ +LATEX_TIMESTAMP = NO + +# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute) +# path from which the emoji images will be read. If a relative path is entered, +# it will be relative to the LATEX_OUTPUT directory. If left blank the +# LATEX_OUTPUT directory will be used. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EMOJI_DIRECTORY = + #--------------------------------------------------------------------------- # Configuration options related to the RTF output #--------------------------------------------------------------------------- -# If the GENERATE_RTF tag is set to YES doxygen will generate RTF output. The +# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The # RTF output is optimized for Word 97 and may not look too pretty with other RTF # readers/editors. # The default value is: NO. @@ -1701,7 +1837,7 @@ RTF_OUTPUT = rtf -# If the COMPACT_RTF tag is set to YES doxygen generates more compact RTF +# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF # documents. This may be useful for small projects and may help to save some # trees in general. # The default value is: NO. @@ -1721,9 +1857,9 @@ RTF_HYPERLINKS = NO -# Load stylesheet definitions from file. Syntax is similar to doxygen's config -# file, i.e. a series of assignments. You only have to provide replacements, -# missing definitions are set to their default value. +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# configuration file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. # # See also section "Doxygen usage" for information on how to generate the # default style sheet that doxygen normally uses. @@ -1732,17 +1868,27 @@ RTF_STYLESHEET_FILE = # Set optional variables used in the generation of an RTF document. Syntax is -# similar to doxygen's config file. A template extensions file can be generated -# using doxygen -e rtf extensionFile. +# similar to doxygen's configuration file. A template extensions file can be +# generated using doxygen -e rtf extensionFile. # This tag requires that the tag GENERATE_RTF is set to YES. RTF_EXTENSIONS_FILE = +# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code +# with syntax highlighting in the RTF output. +# +# Note that which sources are shown also depends on other settings such as +# SOURCE_BROWSER. +# The default value is: NO. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_SOURCE_CODE = NO + #--------------------------------------------------------------------------- # Configuration options related to the man page output #--------------------------------------------------------------------------- -# If the GENERATE_MAN tag is set to YES doxygen will generate man pages for +# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for # classes and files. # The default value is: NO. @@ -1766,6 +1912,13 @@ MAN_EXTENSION = .3 +# The MAN_SUBDIR tag determines the name of the directory created within +# MAN_OUTPUT in which the man pages are placed. If defaults to man followed by +# MAN_EXTENSION with the initial . removed. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_SUBDIR = + # If the MAN_LINKS tag is set to YES and doxygen generates man output, then it # will generate one additional man file for each entity documented in the real # man page(s). 
These additional files only source the real man page, but without @@ -1779,7 +1932,7 @@ # Configuration options related to the XML output #--------------------------------------------------------------------------- -# If the GENERATE_XML tag is set to YES doxygen will generate an XML file that +# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that # captures the structure of the code including all documentation. # The default value is: NO. @@ -1793,19 +1946,7 @@ XML_OUTPUT = xml -# The XML_SCHEMA tag can be used to specify a XML schema, which can be used by a -# validating XML parser to check the syntax of the XML files. -# This tag requires that the tag GENERATE_XML is set to YES. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify a XML DTD, which can be used by a -# validating XML parser to check the syntax of the XML files. -# This tag requires that the tag GENERATE_XML is set to YES. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES doxygen will dump the program +# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program # listings (including syntax highlighting and cross-referencing information) to # the XML output. Note that enabling this will significantly increase the size # of the XML output. @@ -1814,11 +1955,18 @@ XML_PROGRAMLISTING = YES +# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include +# namespace members in file scope as well, matching the HTML output. +# The default value is: NO. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_NS_MEMB_FILE_SCOPE = NO + #--------------------------------------------------------------------------- # Configuration options related to the DOCBOOK output #--------------------------------------------------------------------------- -# If the GENERATE_DOCBOOK tag is set to YES doxygen will generate Docbook files +# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files # that can be used to generate PDF. # The default value is: NO. @@ -1832,14 +1980,23 @@ DOCBOOK_OUTPUT = docbook +# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the +# program listings (including syntax highlighting and cross-referencing +# information) to the DOCBOOK output. Note that enabling this will significantly +# increase the size of the DOCBOOK output. +# The default value is: NO. +# This tag requires that the tag GENERATE_DOCBOOK is set to YES. + +DOCBOOK_PROGRAMLISTING = NO + #--------------------------------------------------------------------------- # Configuration options for the AutoGen Definitions output #--------------------------------------------------------------------------- -# If the GENERATE_AUTOGEN_DEF tag is set to YES doxygen will generate an AutoGen -# Definitions (see http://autogen.sf.net) file that captures the structure of -# the code including all documentation. Note that this feature is still -# experimental and incomplete at the moment. +# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an +# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures +# the structure of the code including all documentation. Note that this feature +# is still experimental and incomplete at the moment. # The default value is: NO. 
GENERATE_AUTOGEN_DEF = NO @@ -1848,7 +2005,7 @@ # Configuration options related to the Perl module output #--------------------------------------------------------------------------- -# If the GENERATE_PERLMOD tag is set to YES doxygen will generate a Perl module +# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module # file that captures the structure of the code including all documentation. # # Note that this feature is still experimental and incomplete at the moment. @@ -1856,7 +2013,7 @@ GENERATE_PERLMOD = NO -# If the PERLMOD_LATEX tag is set to YES doxygen will generate the necessary +# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary # Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI # output from the Perl module output. # The default value is: NO. @@ -1864,9 +2021,9 @@ PERLMOD_LATEX = NO -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be nicely +# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely # formatted so it can be parsed by a human reader. This is useful if you want to -# understand what is going on. On the other hand, if this tag is set to NO the +# understand what is going on. On the other hand, if this tag is set to NO, the # size of the Perl module output will be much smaller and Perl will parse it # just the same. # The default value is: YES. @@ -1886,14 +2043,14 @@ # Configuration options related to the preprocessor #--------------------------------------------------------------------------- -# If the ENABLE_PREPROCESSING tag is set to YES doxygen will evaluate all +# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all # C-preprocessor directives found in the sources and include files. # The default value is: YES. ENABLE_PREPROCESSING = YES -# If the MACRO_EXPANSION tag is set to YES doxygen will expand all macro names -# in the source code. If set to NO only conditional compilation will be +# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names +# in the source code. If set to NO, only conditional compilation will be # performed. Macro expansion can be done in a controlled way by setting # EXPAND_ONLY_PREDEF to YES. # The default value is: NO. @@ -1909,7 +2066,7 @@ EXPAND_ONLY_PREDEF = NO -# If the SEARCH_INCLUDES tag is set to YES the includes files in the +# If the SEARCH_INCLUDES tag is set to YES, the include files in the # INCLUDE_PATH will be searched if a #include is found. # The default value is: YES. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. @@ -1939,7 +2096,7 @@ # recursively expanded use the := operator instead of the = operator. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. -PREDEFINED = +PREDEFINED = USE_BLACKMAGIC USE_IMAGEMAGICK # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this # tag can be used to specify a list of macro names that should be expanded. The @@ -1951,9 +2108,9 @@ EXPAND_AS_DEFINED = # If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will -# remove all refrences to function-like macros that are alone on a line, have an -# all uppercase name, and do not end with a semicolon. Such function macros are -# typically used for boiler-plate code, and will confuse the parser if not +# remove all references to function-like macros that are alone on a line, have +# an all uppercase name, and do not end with a semicolon. 
Such function macros +# are typically used for boiler-plate code, and will confuse the parser if not # removed. # The default value is: YES. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. @@ -1973,7 +2130,7 @@ # where loc1 and loc2 can be relative or absolute paths or URLs. See the # section "Linking to external documentation" for more information about the use # of tag files. -# Note: Each tag file must have an unique name (where the name does NOT include +# Note: Each tag file must have a unique name (where the name does NOT include # the path). If a tag file is not located in the directory in which doxygen is # run, you must also specify the path to the tagfile here. @@ -1985,20 +2142,21 @@ GENERATE_TAGFILE = -# If the ALLEXTERNALS tag is set to YES all external class will be listed in the -# class index. If set to NO only the inherited external classes will be listed. +# If the ALLEXTERNALS tag is set to YES, all external class will be listed in +# the class index. If set to NO, only the inherited external classes will be +# listed. # The default value is: NO. ALLEXTERNALS = NO -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed in -# the modules index. If set to NO, only the current project's groups will be +# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will be # listed. # The default value is: YES. EXTERNAL_GROUPS = YES -# If the EXTERNAL_PAGES tag is set to YES all external pages will be listed in +# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in # the related pages index. If set to NO, only the current project's pages will # be listed. # The default value is: YES. @@ -2015,7 +2173,7 @@ # Configuration options related to the dot tool #--------------------------------------------------------------------------- -# If the CLASS_DIAGRAMS tag is set to YES doxygen will generate a class diagram +# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram # (in HTML and LaTeX) for classes with base or super classes. Setting the tag to # NO turns the diagrams off. Note that this option also works with HAVE_DOT # disabled, but it is recommended to install and use dot, since it yields more @@ -2040,7 +2198,7 @@ DIA_PATH = -# If set to YES, the inheritance and collaboration graphs will hide inheritance +# If set to YES the inheritance and collaboration graphs will hide inheritance # and usage relations if the target is undocumented or is not a class. # The default value is: YES. @@ -2065,7 +2223,7 @@ DOT_NUM_THREADS = 0 -# When you want a differently looking font n the dot files that doxygen +# When you want a differently looking font in the dot files that doxygen # generates you can specify the font name using DOT_FONTNAME. You need to make # sure dot is able to find the font, which can be done by putting it in a # standard location or by setting the DOTFONTPATH environment variable or by @@ -2073,7 +2231,7 @@ # The default value is: Helvetica. # This tag requires that the tag HAVE_DOT is set to YES. -DOT_FONTNAME = +DOT_FONTNAME = # The DOT_FONTSIZE tag can be used to set the size (in points) of the font of # dot graphs. 
@@ -2113,7 +2271,7 @@ GROUP_GRAPHS = YES -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and # collaboration diagrams in a style similar to the OMG's Unified Modeling # Language. # The default value is: NO. @@ -2165,7 +2323,8 @@ # # Note that enabling this option will significantly increase the time of a run. # So in most cases it will be better to enable call graphs for selected -# functions only using the \callgraph command. +# functions only using the \callgraph command. Disabling a call graph can be +# accomplished by means of the command \hidecallgraph. # The default value is: NO. # This tag requires that the tag HAVE_DOT is set to YES. @@ -2176,7 +2335,8 @@ # # Note that enabling this option will significantly increase the time of a run. # So in most cases it will be better to enable caller graphs for selected -# functions only using the \callergraph command. +# functions only using the \callergraph command. Disabling a caller graph can be +# accomplished by means of the command \hidecallergraph. # The default value is: NO. # This tag requires that the tag HAVE_DOT is set to YES. @@ -2199,15 +2359,19 @@ DIRECTORY_GRAPH = YES # The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. +# generated by dot. For an explanation of the image formats see the section +# output formats in the documentation of the dot tool (Graphviz (see: +# http://www.graphviz.org/)). # Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order # to make the SVG files visible in IE 9+ (other browsers do not have this # requirement). -# Possible values are: png, jpg, gif and svg. +# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo, +# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and +# png:gdiplus:gdiplus. # The default value is: png. # This tag requires that the tag HAVE_DOT is set to YES. -DOT_IMAGE_FORMAT = png +DOT_IMAGE_FORMAT = svg # If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to # enable generation of interactive SVG images that allow zooming and panning. @@ -2219,7 +2383,7 @@ # The default value is: NO. # This tag requires that the tag HAVE_DOT is set to YES. -INTERACTIVE_SVG = NO +INTERACTIVE_SVG = YES # The DOT_PATH tag can be used to specify the path where the dot tool can be # found. If left blank, it is assumed the dot tool can be found in the path. @@ -2246,6 +2410,24 @@ DIAFILE_DIRS = +# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the +# path where java can find the plantuml.jar file. If left blank, it is assumed +# PlantUML is not used or called during a preprocessing step. Doxygen will +# generate a warning when it encounters a \startuml command in this case and +# will not generate output for the diagram. + +PLANTUML_JAR_PATH = + +# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a +# configuration file for plantuml. + +PLANTUML_CFG_FILE = + +# When using plantuml, the specified paths are searched for files specified by +# the !include statement in a plantuml block. + +PLANTUML_INCLUDE_PATH = + # The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes # that will be shown in the graph. 
If the number of nodes in a graph becomes # larger than this value, doxygen will truncate the graph, which is visualized @@ -2282,7 +2464,7 @@ DOT_TRANSPARENT = YES -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output # files in one run (i.e. multiple -o and -T options on the command line). This # makes dot run faster, but since only newer versions of dot (>1.8.10) support # this, this feature is disabled by default. @@ -2299,7 +2481,7 @@ GENERATE_LEGEND = YES -# If the DOT_CLEANUP tag is set to YES doxygen will remove the intermediate dot +# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot # files that are used to generate the various graphs. # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. diff -Nru libopenshot-0.2.2+dfsg1/.gitignore libopenshot-0.2.5+dfsg1/.gitignore --- libopenshot-0.2.2+dfsg1/.gitignore 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/.gitignore 2020-03-03 08:00:06.000000000 +0000 @@ -1,6 +1,9 @@ -build/ -build/* +/build* *.DS_Store .pydevproject .settings -.idea/* \ No newline at end of file +.idea/* +.project +.cproject +/.metadata/ +*~ diff -Nru libopenshot-0.2.2+dfsg1/.gitlab-ci.yml libopenshot-0.2.5+dfsg1/.gitlab-ci.yml --- libopenshot-0.2.2+dfsg1/.gitlab-ci.yml 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/.gitlab-ci.yml 2020-03-03 08:00:06.000000000 +0000 @@ -17,12 +17,17 @@ - export LIBOPENSHOT_AUDIO_DIR=$CI_PROJECT_DIR/build/install-x64 - mkdir -p build; cd build; - mkdir -p install-x64/python; - - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -D"CMAKE_BUILD_TYPE:STRING=Release" ../ + - cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -D"CMAKE_BUILD_TYPE:STRING=Release" -D"USE_SYSTEM_JSONCPP=0" ../ - make - make install - - mv /usr/local/lib/python3.4/dist-packages/*openshot* install-x64/python + - make doc + - ~/auto-update-docs "$CI_PROJECT_DIR/build" "$CI_COMMIT_REF_NAME" + - mv install-x64/lib/python3.4/site-packages/*openshot* install-x64/python - echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME" + - git log $(git describe --tags --abbrev=0 @^)..@ --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" when: always + except: + - tags tags: - linux @@ -41,64 +46,73 @@ - export LIBOPENSHOT_AUDIO_DIR=$CI_PROJECT_DIR/build/install-x64 - mkdir -p build; cd build; - mkdir -p install-x64/python; - - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/qt5/5.5/clang_64 -DPYTHON_INCLUDE_DIR=/Library/Frameworks/Python.framework/Versions/3.6/include/python3.6m -DPYTHON_LIBRARY=/Library/Frameworks/Python.framework/Versions/3.6/lib/libpython3.6.dylib -DPython_FRAMEWORKS=/Library/Frameworks/Python.framework/ -D"CMAKE_BUILD_TYPE:STRING=Debug" -D"CMAKE_OSX_SYSROOT=/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.9.sdk" -D"CMAKE_OSX_DEPLOYMENT_TARGET=10.9" -D"CMAKE_INSTALL_RPATH_USE_LINK_PATH=1" -D"ENABLE_RUBY=0" ../ + - 
cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/qt5/5.5/clang_64 -DPYTHON_INCLUDE_DIR=/Library/Frameworks/Python.framework/Versions/3.6/include/python3.6m -DPYTHON_LIBRARY=/Library/Frameworks/Python.framework/Versions/3.6/lib/libpython3.6.dylib -DPython_FRAMEWORKS=/Library/Frameworks/Python.framework/ -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_OSX_SYSROOT=/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.9.sdk" -D"CMAKE_OSX_DEPLOYMENT_TARGET=10.9" -D"CMAKE_INSTALL_RPATH_USE_LINK_PATH=1" -D"ENABLE_RUBY=0" ../ - make - make install - - mv /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/*openshot* install-x64/python + - mv install-x64/lib/python3.6/site-packages/*openshot* install-x64/python - echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME" + - git log $(git describe --tags --abbrev=0 @^)..@ --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" when: always + except: + - tags tags: - mac -windows-builder-x86: +windows-builder-x64: stage: build-libopenshot artifacts: expire_in: 6 months paths: - - build\install-x86\* + - build\install-x64\* script: - - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } - - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } + - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x64" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } + - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x64" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } - Expand-Archive -Path artifacts.zip -DestinationPath . 
- - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x86" - - $env:UNITTEST_DIR = "C:\msys32\usr" - - $env:ZMQDIR = "C:\msys32\usr" - - $env:Path = "C:\msys32\mingw32\bin;C:\msys32\mingw32\lib;C:\msys32\usr\lib\cmake\UnitTest++;C:\msys32\home\jonathan\depot_tools;C:\msys32\usr;C:\msys32\usr\lib;" + $env:Path; + - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x64" + - $env:UNITTEST_DIR = "C:\msys64\usr" + - $env:RESVGDIR = "C:\msys64\usr" + - $env:Path = "C:\msys64\mingw64\bin;C:\msys64\mingw64\lib;C:\msys64\usr\lib\cmake\UnitTest++;C:\msys64\home\jonathan\depot_tools;C:\msys64\usr;C:\msys64\usr\lib;" + $env:Path; - New-Item -ItemType Directory -Force -Path build - - New-Item -ItemType Directory -Force -Path build\install-x86\python + - New-Item -ItemType Directory -Force -Path build\install-x64\python - cd build - - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x86" -G "MinGW Makefiles" -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_CXX_FLAGS=-m32" -D"CMAKE_EXE_LINKER_FLAGS=-Wl,--large-address-aware" -D"CMAKE_C_FLAGS=-m32" ../ + - cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x64" -G "MSYS Makefiles" -DCMAKE_MAKE_PROGRAM=mingw32-make -D"CMAKE_BUILD_TYPE:STRING=Release" ../ - mingw32-make install - - Move-Item -Force -path "C:\msys32\mingw32\lib\python3.6\site-packages\*openshot*" -destination "install-x86\python\" - - cp src\libopenshot.dll install-x86\lib - - New-Item -path "install-x86/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - Move-Item -Force -path "install-x64\lib\python3.7\site-packages\*openshot*" -destination "install-x64\python\" + - New-Item -path "install-x64/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - $PREV_GIT_LABEL=(git describe --tags --abbrev=0 '@^') + - git log "$PREV_GIT_LABEL..@" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" when: always + except: + - tags tags: - windows -windows-builder-x64: +windows-builder-x86: stage: build-libopenshot artifacts: expire_in: 6 months paths: - - build\install-x64\* + - build\install-x86\* script: - - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x64" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } - - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x64" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } + - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } + - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x86" -Headers 
@{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } - Expand-Archive -Path artifacts.zip -DestinationPath . - - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x64" - - $env:UNITTEST_DIR = "C:\msys64\usr" - - $env:ZMQDIR = "C:\msys64\usr" - - $env:Path = "C:\msys64\mingw64\bin;C:\msys64\mingw64\lib;C:\msys64\usr\lib\cmake\UnitTest++;C:\msys64\home\jonathan\depot_tools;C:\msys64\usr;C:\msys64\usr\lib;" + $env:Path; + - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x86" + - $env:UNITTEST_DIR = "C:\msys32\usr" + - $env:RESVGDIR = "C:\msys32\usr" + - $env:Path = "C:\msys32\mingw32\bin;C:\msys32\mingw32\lib;C:\msys32\usr\lib\cmake\UnitTest++;C:\msys32\home\jonathan\depot_tools;C:\msys32\usr;C:\msys32\usr\lib;" + $env:Path; - New-Item -ItemType Directory -Force -Path build - - New-Item -ItemType Directory -Force -Path build\install-x64\python + - New-Item -ItemType Directory -Force -Path build\install-x86\python - cd build - - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x64" -G "MinGW Makefiles" -D"CMAKE_BUILD_TYPE:STRING=Release" ../ + - cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x86" -G "MSYS Makefiles" -DCMAKE_MAKE_PROGRAM=mingw32-make -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_CXX_FLAGS=-m32" -D"CMAKE_EXE_LINKER_FLAGS=-Wl,--large-address-aware" -D"CMAKE_C_FLAGS=-m32" ../ - mingw32-make install - - Move-Item -Force -path "C:\msys64\mingw64\lib\python3.6\site-packages\*openshot*" -destination "install-x64\python\" - - cp src\libopenshot.dll install-x64\lib - - New-Item -path "install-x64/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - Move-Item -Force -path "install-x86\lib\python3.7\site-packages\*openshot*" -destination "install-x86\python\" + - New-Item -path "install-x86/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - $PREV_GIT_LABEL=(git describe --tags --abbrev=0 '@^') + - git log "$PREV_GIT_LABEL..@" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x86/share/$CI_PROJECT_NAME.log" when: always + except: + - tags tags: - windows @@ -108,5 +122,7 @@ - "curl -X POST -F token=$OPENSHOT_QT_PIPELINE_TOKEN -F ref=$CI_COMMIT_REF_NAME http://gitlab.openshot.org/api/v4/projects/3/trigger/pipeline" when: always dependencies: [] + except: + - tags tags: - gitlab diff -Nru libopenshot-0.2.2+dfsg1/include/AudioBufferSource.h libopenshot-0.2.5+dfsg1/include/AudioBufferSource.h --- libopenshot-0.2.2+dfsg1/include/AudioBufferSource.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/AudioBufferSource.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for AudioBufferSource class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,18 +31,8 @@ #ifndef OPENSHOT_AUDIOBUFFERSOURCE_H #define OPENSHOT_AUDIOBUFFERSOURCE_H -/// Do not include the juce unittest headers, because it collides with unittest++ -#define __JUCE_UNITTEST_JUCEHEADER__ - -#ifndef _NDEBUG - /// Define NO debug for JUCE on mac os - #define _NDEBUG -#endif - #include -#include "JuceLibraryCode/JuceHeader.h" - -using namespace std; +#include "JuceHeader.h" /// This namespace is the default namespace for all code in the openshot library namespace openshot @@ -51,25 +44,25 @@ * The JUCE library cannot play audio directly from an AudioSampleBuffer, so this class exposes * an AudioSampleBuffer as a AudioSource, so that JUCE can play the audio. */ - class AudioBufferSource : public PositionableAudioSource + class AudioBufferSource : public juce::PositionableAudioSource { private: int position; int start; bool repeat; - AudioSampleBuffer *buffer; + juce::AudioSampleBuffer *buffer; public: /// @brief Default constructor /// @param audio_buffer This buffer contains the samples you want to play through JUCE. - AudioBufferSource(AudioSampleBuffer *audio_buffer); + AudioBufferSource(juce::AudioSampleBuffer *audio_buffer); /// Destructor ~AudioBufferSource(); /// @brief Get the next block of audio samples /// @param info This struct informs us of which samples are needed next. - void getNextAudioBlock (const AudioSourceChannelInfo& info); + void getNextAudioBlock (const juce::AudioSourceChannelInfo& info); /// Prepare to play this audio source void prepareToPlay(int, double); @@ -79,13 +72,13 @@ /// @brief Set the next read position of this source /// @param newPosition The sample # to start reading from - void setNextReadPosition (int64 newPosition); + void setNextReadPosition (juce::int64 newPosition); /// Get the next read position of this source - int64 getNextReadPosition() const; + juce::int64 getNextReadPosition() const; /// Get the total length (in samples) of this audio source - int64 getTotalLength() const; + juce::int64 getTotalLength() const; /// Determines if this audio source should repeat when it reaches the end bool isLooping() const; @@ -95,7 +88,7 @@ void setLooping (bool shouldLoop); /// Update the internal buffer used by this source - void setBuffer (AudioSampleBuffer *audio_buffer); + void setBuffer (juce::AudioSampleBuffer *audio_buffer); }; } diff -Nru libopenshot-0.2.2+dfsg1/include/AudioDeviceInfo.h libopenshot-0.2.5+dfsg1/include/AudioDeviceInfo.h --- libopenshot-0.2.2+dfsg1/include/AudioDeviceInfo.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/AudioDeviceInfo.h 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,47 @@ +/** + * @file + * @brief Header file for Audio Device Info struct + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. 
+ * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_AUDIODEVICEINFO_H +#define OPENSHOT_AUDIODEVICEINFO_H + + +/** + * @brief This struct hold information about Audio Devices + * + * The type and name of the audio device. + */ +namespace openshot { + struct AudioDeviceInfo + { + std::string name; + std::string type; + }; +} +#endif diff -Nru libopenshot-0.2.2+dfsg1/include/AudioReaderSource.h libopenshot-0.2.5+dfsg1/include/AudioReaderSource.h --- libopenshot-0.2.2+dfsg1/include/AudioReaderSource.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/AudioReaderSource.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for AudioReaderSource class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,19 +31,9 @@ #ifndef OPENSHOT_AUDIOREADERSOURCE_H #define OPENSHOT_AUDIOREADERSOURCE_H -/// Do not include the juce unittest headers, because it collides with unittest++ -#define __JUCE_UNITTEST_JUCEHEADER__ - -#ifndef _NDEBUG - /// Define NO debug for JUCE on mac os - #define _NDEBUG -#endif - #include -#include "JuceLibraryCode/JuceHeader.h" #include "ReaderBase.h" - -using namespace std; +#include "JuceHeader.h" /// This namespace is the default namespace for all code in the openshot library namespace openshot @@ -51,13 +44,13 @@ * * This allows any reader to play audio through JUCE (our audio framework). */ - class AudioReaderSource : public PositionableAudioSource + class AudioReaderSource : public juce::PositionableAudioSource { private: int position; /// The position of the audio source (index of buffer) bool repeat; /// Repeat the audio source when finished int size; /// The size of the internal buffer - AudioSampleBuffer *buffer; /// The audio sample buffer + juce::AudioSampleBuffer *buffer; /// The audio sample buffer int speed; /// The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) ReaderBase *reader; /// The reader to pull samples from @@ -87,7 +80,7 @@ /// @brief Get the next block of audio samples /// @param info This struct informs us of which samples are needed next. 
- void getNextAudioBlock (const AudioSourceChannelInfo& info); + void getNextAudioBlock (const juce::AudioSourceChannelInfo& info); /// Prepare to play this audio source void prepareToPlay(int, double); @@ -97,13 +90,13 @@ /// @brief Set the next read position of this source /// @param newPosition The sample # to start reading from - void setNextReadPosition (int64 newPosition); + void setNextReadPosition (juce::int64 newPosition); /// Get the next read position of this source - int64 getNextReadPosition() const; + juce::int64 getNextReadPosition() const; /// Get the total length (in samples) of this audio source - int64 getTotalLength() const; + juce::int64 getTotalLength() const; /// Determines if this audio source should repeat when it reaches the end bool isLooping() const; @@ -113,7 +106,7 @@ void setLooping (bool shouldLoop); /// Update the internal buffer used by this source - void setBuffer (AudioSampleBuffer *audio_buffer); + void setBuffer (juce::AudioSampleBuffer *audio_buffer); const ReaderInfo & getReaderInfo() const { return reader->info; } diff -Nru libopenshot-0.2.2+dfsg1/include/AudioResampler.h libopenshot-0.2.5+dfsg1/include/AudioResampler.h --- libopenshot-0.2.2+dfsg1/include/AudioResampler.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/AudioResampler.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for AudioResampler class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,19 +31,9 @@ #ifndef OPENSHOT_RESAMPLER_H #define OPENSHOT_RESAMPLER_H -/// Do not include the juce unittest headers, because it collides with unittest++ -#ifndef __JUCE_UNITTEST_JUCEHEADER__ - #define __JUCE_UNITTEST_JUCEHEADER__ -#endif - -#ifndef _NDEBUG - // Define NO debug for JUCE on mac os - #define _NDEBUG -#endif - -#include "JuceLibraryCode/JuceHeader.h" #include "AudioBufferSource.h" #include "Exceptions.h" +#include "JuceHeader.h" namespace openshot { @@ -52,11 +45,11 @@ */ class AudioResampler { private: - AudioSampleBuffer *buffer; - AudioSampleBuffer *resampled_buffer; - AudioBufferSource *buffer_source; - ResamplingAudioSource *resample_source; - AudioSourceChannelInfo resample_callback_buffer; + juce::AudioSampleBuffer *buffer; + juce::AudioSampleBuffer *resampled_buffer; + openshot::AudioBufferSource *buffer_source; + juce::ResamplingAudioSource *resample_source; + juce::AudioSourceChannelInfo resample_callback_buffer; int num_of_samples; int new_num_of_samples; @@ -75,15 +68,15 @@ /// @param new_buffer The buffer of audio samples needing to be resampled /// @param sample_rate The original sample rate of the buffered samples /// @param new_sample_rate The requested sample rate you need - void SetBuffer(AudioSampleBuffer *new_buffer, double sample_rate, double new_sample_rate); + void SetBuffer(juce::AudioSampleBuffer *new_buffer, double sample_rate, double new_sample_rate); /// @brief Sets the audio buffer and key settings /// @param new_buffer The buffer of audio samples needing to be resampled /// @param ratio The multiplier that needs to be applied to the sample rate (this is how resampling happens) - void SetBuffer(AudioSampleBuffer *new_buffer, double ratio); + void SetBuffer(juce::AudioSampleBuffer *new_buffer, double 
ratio); /// Get the resampled audio buffer - AudioSampleBuffer* GetResampledBuffer(); + juce::AudioSampleBuffer* GetResampledBuffer(); }; } diff -Nru libopenshot-0.2.2+dfsg1/include/CacheBase.h libopenshot-0.2.5+dfsg1/include/CacheBase.h --- libopenshot-0.2.2+dfsg1/include/CacheBase.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/CacheBase.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for CacheBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -29,6 +32,7 @@ #define OPENSHOT_CACHE_BASE_H #include +#include #include "Frame.h" #include "Exceptions.h" #include "Json.h" @@ -45,11 +49,11 @@ class CacheBase { protected: - string cache_type; ///< This is a friendly type name of the derived cache instance + std::string cache_type; ///< This is a friendly type name of the derived cache instance int64_t max_bytes; ///< This is the max number of bytes to cache (0 = no limit) /// Section lock for multiple threads - CriticalSection *cacheCriticalSection; + juce::CriticalSection *cacheCriticalSection; public: @@ -62,7 +66,7 @@ /// @brief Add a Frame to the cache /// @param frame The openshot::Frame object needing to be cached. - virtual void Add(std::shared_ptr frame) = 0; + virtual void Add(std::shared_ptr frame) = 0; /// Clear the cache of all frames virtual void Clear() = 0; @@ -72,13 +76,13 @@ /// @brief Get a frame from the cache /// @param frame_number The frame number of the cached frame - virtual std::shared_ptr GetFrame(int64_t frame_number) = 0; + virtual std::shared_ptr GetFrame(int64_t frame_number) = 0; /// Gets the maximum bytes value virtual int64_t GetBytes() = 0; /// Get the smallest frame number - virtual std::shared_ptr GetSmallestFrame() = 0; + virtual std::shared_ptr GetSmallestFrame() = 0; /// @brief Remove a specific frame /// @param frame_number The frame number of the cached frame @@ -105,10 +109,11 @@ void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels); /// Get and Set JSON methods - virtual string Json() = 0; ///< Generate JSON string of this object - virtual void SetJson(string value) = 0; ///< Load JSON string into this object - virtual Json::Value JsonValue() = 0; ///< Generate Json::JsonValue for this object - virtual void SetJsonValue(Json::Value root) = 0; ///< Load Json::JsonValue into this object + virtual std::string Json() = 0; ///< Generate JSON string of this object + virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object + virtual Json::Value JsonValue() = 0; ///< Generate Json::Value for this object + virtual void SetJsonValue(const Json::Value root) = 0; ///< Load Json::Value into this object + virtual ~CacheBase() = default; }; diff -Nru libopenshot-0.2.2+dfsg1/include/CacheDisk.h libopenshot-0.2.5+dfsg1/include/CacheDisk.h --- libopenshot-0.2.2+dfsg1/include/CacheDisk.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/CacheDisk.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for CacheDisk class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 
OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -50,24 +53,24 @@ class CacheDisk : public CacheBase { private: QDir path; ///< This is the folder path of the cache directory - map frames; ///< This map holds the frame number and Frame objects - deque frame_numbers; ///< This queue holds a sequential list of cached Frame numbers - string image_format; + std::map frames; ///< This map holds the frame number and Frame objects + std::deque frame_numbers; ///< This queue holds a sequential list of cached Frame numbers + std::string image_format; float image_quality; float image_scale; int64_t frame_size_bytes; ///< The size of the cached frame in bytes bool needs_range_processing; ///< Something has changed, and the range data needs to be re-calculated - string json_ranges; ///< JSON ranges of frame numbers - vector ordered_frame_numbers; ///< Ordered list of frame numbers used by cache - map frame_ranges; ///< This map holds the ranges of frames, useful for quickly displaying the contents of the cache + std::string json_ranges; ///< JSON ranges of frame numbers + std::vector ordered_frame_numbers; ///< Ordered list of frame numbers used by cache + std::map frame_ranges; ///< This map holds the ranges of frames, useful for quickly displaying the contents of the cache int64_t range_version; ///< The version of the JSON range data (incremented with each change) /// Clean up cached frames that exceed the max number of bytes void CleanUp(); /// Init path directory - void InitPath(string cache_path); + void InitPath(std::string cache_path); /// Calculate ranges of frames void CalculateRanges(); @@ -78,7 +81,7 @@ /// @param format The image format for disk caching (ppm, jpg, png) /// @param quality The quality of the image (1.0=highest quality/slowest speed, 0.0=worst quality/fastest speed) /// @param scale The scale factor for the preview images (1.0 = original size, 0.5=half size, 0.25=quarter size, etc...) - CacheDisk(string cache_path, string format, float quality, float scale); + CacheDisk(std::string cache_path, std::string format, float quality, float scale); /// @brief Constructor that sets the max bytes to cache /// @param cache_path The folder path of the cache directory (empty string = /tmp/preview-cache/) @@ -86,14 +89,14 @@ /// @param quality The quality of the image (1.0=highest quality/slowest speed, 0.0=worst quality/fastest speed) /// @param scale The scale factor for the preview images (1.0 = original size, 0.5=half size, 0.25=quarter size, etc...) /// @param max_bytes The maximum bytes to allow in the cache. Once exceeded, the cache will purge the oldest frames. - CacheDisk(string cache_path, string format, float quality, float scale, int64_t max_bytes); + CacheDisk(std::string cache_path, std::string format, float quality, float scale, int64_t max_bytes); // Default destructor ~CacheDisk(); /// @brief Add a Frame to the cache /// @param frame The openshot::Frame object needing to be cached. 
- void Add(std::shared_ptr frame); + void Add(std::shared_ptr frame); /// Clear the cache of all frames void Clear(); @@ -103,13 +106,13 @@ /// @brief Get a frame from the cache /// @param frame_number The frame number of the cached frame - std::shared_ptr GetFrame(int64_t frame_number); + std::shared_ptr GetFrame(int64_t frame_number); /// Gets the maximum bytes value int64_t GetBytes(); /// Get the smallest frame number - std::shared_ptr GetSmallestFrame(); + std::shared_ptr GetSmallestFrame(); /// @brief Move frame to front of queue (so it lasts longer) /// @param frame_number The frame number of the cached frame @@ -125,10 +128,10 @@ void Remove(int64_t start_frame_number, int64_t end_frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json(); ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object }; } diff -Nru libopenshot-0.2.2+dfsg1/include/CacheMemory.h libopenshot-0.2.5+dfsg1/include/CacheMemory.h --- libopenshot-0.2.2+dfsg1/include/CacheMemory.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/CacheMemory.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for CacheMemory class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -47,13 +50,13 @@ */ class CacheMemory : public CacheBase { private: - map > frames; ///< This map holds the frame number and Frame objects - deque frame_numbers; ///< This queue holds a sequential list of cached Frame numbers + std::map > frames; ///< This map holds the frame number and Frame objects + std::deque frame_numbers; ///< This queue holds a sequential list of cached Frame numbers bool needs_range_processing; ///< Something has changed, and the range data needs to be re-calculated - string json_ranges; ///< JSON ranges of frame numbers - vector ordered_frame_numbers; ///< Ordered list of frame numbers used by cache - map frame_ranges; ///< This map holds the ranges of frames, useful for quickly displaying the contents of the cache + std::string json_ranges; ///< JSON ranges of frame numbers + std::vector ordered_frame_numbers; ///< Ordered list of frame numbers used by cache + std::map frame_ranges; ///< This map holds the ranges of frames, useful for quickly displaying the contents of the cache int64_t range_version; ///< The version of the JSON range data (incremented with each change) /// Clean up cached frames that exceed the max number of bytes @@ -71,11 +74,11 @@ CacheMemory(int64_t max_bytes); // Default destructor - ~CacheMemory(); + virtual ~CacheMemory(); /// @brief Add a Frame to the cache /// @param frame The openshot::Frame object needing to be cached. 
- void Add(std::shared_ptr frame); + void Add(std::shared_ptr frame); /// Clear the cache of all frames void Clear(); @@ -85,13 +88,13 @@ /// @brief Get a frame from the cache /// @param frame_number The frame number of the cached frame - std::shared_ptr GetFrame(int64_t frame_number); + std::shared_ptr GetFrame(int64_t frame_number); /// Gets the maximum bytes value int64_t GetBytes(); /// Get the smallest frame number - std::shared_ptr GetSmallestFrame(); + std::shared_ptr GetSmallestFrame(); /// @brief Move frame to front of queue (so it lasts longer) /// @param frame_number The frame number of the cached frame @@ -107,10 +110,10 @@ void Remove(int64_t start_frame_number, int64_t end_frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json(); ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue(); ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object }; } diff -Nru libopenshot-0.2.2+dfsg1/include/ChannelLayouts.h libopenshot-0.2.5+dfsg1/include/ChannelLayouts.h --- libopenshot-0.2.2+dfsg1/include/ChannelLayouts.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ChannelLayouts.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ChannelLayout class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/ChunkReader.h libopenshot-0.2.5+dfsg1/include/ChunkReader.h --- libopenshot-0.2.2+dfsg1/include/ChunkReader.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ChunkReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ChunkReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -29,8 +32,6 @@ #define OPENSHOT_CHUNK_READER_H #include "ReaderBase.h" -#include "FFmpegReader.h" - #include #include #include @@ -38,14 +39,12 @@ #include #include #include -#include +#include #include #include "Json.h" #include "CacheMemory.h" #include "Exceptions.h" -using namespace std; - namespace openshot { @@ -104,22 +103,22 @@ class ChunkReader : public ReaderBase { private: - string path; + std::string path; bool is_open; int64_t chunk_size; - FFmpegReader *local_reader; + openshot::ReaderBase *local_reader; ChunkLocation previous_location; ChunkVersion version; - std::shared_ptr last_frame; + std::shared_ptr last_frame; /// Check if folder path existing - bool does_folder_exist(string path); + bool does_folder_exist(std::string path); /// Find the location of a frame in a chunk ChunkLocation find_chunk_frame(int64_t requested_frame); /// get a formatted path of a specific chunk - string get_chunk_path(int64_t chunk_number, string folder, string extension); + std::string get_chunk_path(int64_t chunk_number, std::string folder, std::string extension); /// Load JSON meta data about this chunk folder void load_json(); @@ -130,7 +129,7 @@ /// frame 1, or it throws one of the following exceptions. /// @param path The folder path / location of a chunk (chunks are stored as folders) /// @param chunk_version Choose the video version / quality (THUMBNAIL, PREVIEW, or FINAL) - ChunkReader(string path, ChunkVersion chunk_version); + ChunkReader(std::string path, ChunkVersion chunk_version); /// Close the reader void Close(); @@ -144,24 +143,24 @@ void SetChunkSize(int64_t new_size) { chunk_size = new_size; }; /// Get the cache object used by this reader (always return NULL for this reader) - CacheMemory* GetCache() { return NULL; }; + openshot::CacheMemory* GetCache() { return NULL; }; /// @brief Get an openshot::Frame object for a specific frame number of this reader. /// @returns The requested frame (containing the image and audio) /// @param requested_frame The frame number you want to retrieve - std::shared_ptr GetFrame(int64_t requested_frame); + std::shared_ptr GetFrame(int64_t requested_frame); /// Determine if reader is open or closed bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "ChunkReader"; }; + std::string Name() { return "ChunkReader"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open the reader. This is required before you can access frames or data from the reader. 
void Open(); diff -Nru libopenshot-0.2.2+dfsg1/include/ChunkWriter.h libopenshot-0.2.5+dfsg1/include/ChunkWriter.h --- libopenshot-0.2.2+dfsg1/include/ChunkWriter.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ChunkWriter.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ChunkWriter class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -46,8 +49,6 @@ #include "Json.h" -using namespace std; - namespace openshot { /** @@ -80,27 +81,27 @@ class ChunkWriter : public WriterBase { private: - string path; + std::string path; int64_t chunk_count; int64_t chunk_size; int64_t frame_count; bool is_open; bool is_writing; - ReaderBase *local_reader; - FFmpegWriter *writer_thumb; - FFmpegWriter *writer_preview; - FFmpegWriter *writer_final; + openshot::ReaderBase *local_reader; + openshot::FFmpegWriter *writer_thumb; + openshot::FFmpegWriter *writer_preview; + openshot::FFmpegWriter *writer_final; std::shared_ptr last_frame; bool last_frame_needed; - string default_extension; - string default_vcodec; - string default_acodec; + std::string default_extension; + std::string default_vcodec; + std::string default_acodec; /// check for chunk folder - void create_folder(string path); + void create_folder(std::string path); /// get a formatted path of a specific chunk - string get_chunk_path(int64_t chunk_number, string folder, string extension); + std::string get_chunk_path(int64_t chunk_number, std::string folder, std::string extension); /// check for valid chunk json bool is_chunk_valid(); @@ -113,7 +114,7 @@ /// @brief Constructor for ChunkWriter. Throws one of the following exceptions. /// @param path The folder path of the chunk file to be created /// @param reader The initial reader to base this chunk file's meta data on (such as fps, height, width, etc...) - ChunkWriter(string path, ReaderBase *reader); + ChunkWriter(std::string path, openshot::ReaderBase *reader); /// Close the writer void Close(); @@ -133,7 +134,7 @@ /// @brief Add a frame to the stack waiting to be encoded. /// @param frame The openshot::Frame object that needs to be written to this chunk file. - void WriteFrame(std::shared_ptr frame); + void WriteFrame(std::shared_ptr frame); /// @brief Write a block of frames from a reader /// @param start The starting frame number to write (of the reader passed into the constructor) @@ -144,7 +145,7 @@ /// @param reader The reader containing the frames you need /// @param start The starting frame number to write /// @param length The number of frames to write - void WriteFrame(ReaderBase* reader, int64_t start, int64_t length); + void WriteFrame(openshot::ReaderBase* reader, int64_t start, int64_t length); }; diff -Nru libopenshot-0.2.2+dfsg1/include/ClipBase.h libopenshot-0.2.5+dfsg1/include/ClipBase.h --- libopenshot-0.2.2+dfsg1/include/ClipBase.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ClipBase.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ClipBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,11 +31,6 @@ #ifndef OPENSHOT_CLIPBASE_H #define OPENSHOT_CLIPBASE_H -/// Do not include the juce unittest headers, because it collides with unittest++ -#ifndef __JUCE_UNITTEST_JUCEHEADER__ - #define __JUCE_UNITTEST_JUCEHEADER__ -#endif - #include #include #include "Exceptions.h" @@ -40,8 +38,6 @@ #include "KeyFrame.h" #include "Json.h" -using namespace std; - namespace openshot { /** @@ -52,25 +48,23 @@ */ class ClipBase { protected: - string id; ///< ID Property for all derived Clip and Effect classes. + std::string id; ///< ID Property for all derived Clip and Effect classes. float position; ///< The position on the timeline where this clip should start playing int layer; ///< The layer this clip is on. Lower clips are covered up by higher clips. float start; ///< The position in seconds to start playing (used to trim the beginning of a clip) float end; ///< The position in seconds to end playing (used to trim the ending of a clip) - string previous_properties; ///< This string contains the previous JSON properties - int max_width; ///< The maximum image width needed by this clip (used for optimizations) - int max_height; ///< The maximium image height needed by this clip (used for optimizations) + std::string previous_properties; ///< This string contains the previous JSON properties /// Generate JSON for a property - Json::Value add_property_json(string name, float value, string type, string memo, Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame); + Json::Value add_property_json(std::string name, float value, std::string type, std::string memo, const Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame) const; /// Generate JSON choice for a property (dropdown properties) - Json::Value add_property_choice_json(string name, int value, int selected_value); + Json::Value add_property_choice_json(std::string name, int value, int selected_value) const; public: /// Constructor for the base clip - ClipBase() { max_width = 0; max_height = 0; }; + ClipBase() { }; // Compare a clip using the Position() property bool operator< ( ClipBase& a) { return (Position() < a.Position()); } @@ -79,33 +73,31 @@ bool operator>= ( ClipBase& a) { return (Position() >= a.Position()); } /// Get basic properties - string Id() { return id; } ///< Get the Id of this clip object - float Position() { return position; } ///< Get position on timeline (in seconds) - int Layer() { return layer; } ///< Get layer of clip on timeline (lower number is covered by higher numbers) - float Start() { return start; } ///< Get start position (in seconds) of clip (trim start of video) - float End() { return end; } ///< Get end position (in seconds) of clip (trim end of video) - float Duration() { return end - start; } ///< Get the length of this clip (in seconds) + std::string Id() const { return id; } ///< Get the Id of this clip object + float Position() const { return position; } ///< Get position on timeline (in seconds) + int Layer() const { return layer; } ///< Get layer of clip on timeline (lower number is covered by higher numbers) + float Start() const { return start; } ///< Get start position (in seconds) of clip (trim start of video) + float End() const { return end; } ///< Get end position (in seconds) of clip (trim end of video) + float Duration() const { return end - 
start; } ///< Get the length of this clip (in seconds) /// Set basic properties - void Id(string value) { id = value; } ///> Set the Id of this clip object + void Id(std::string value) { id = value; } ///> Set the Id of this clip object void Position(float value) { position = value; } ///< Set position on timeline (in seconds) void Layer(int value) { layer = value; } ///< Set layer of clip on timeline (lower number is covered by higher numbers) void Start(float value) { start = value; } ///< Set start position (in seconds) of clip (trim start of video) void End(float value) { end = value; } ///< Set end position (in seconds) of clip (trim end of video) - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height) { max_width = width; max_height = height; }; - /// Get and Set JSON methods - virtual string Json() = 0; ///< Generate JSON string of this object - virtual void SetJson(string value) = 0; ///< Load JSON string into this object - virtual Json::Value JsonValue() = 0; ///< Generate Json::JsonValue for this object - virtual void SetJsonValue(Json::Value root) = 0; ///< Load Json::JsonValue into this object + virtual std::string Json() const = 0; ///< Generate JSON string of this object + virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object + virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object + virtual void SetJsonValue(const Json::Value root) = 0; ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - virtual string PropertiesJSON(int64_t requested_frame) = 0; + virtual std::string PropertiesJSON(int64_t requested_frame) const = 0; + virtual ~ClipBase() = default; }; diff -Nru libopenshot-0.2.2+dfsg1/include/Clip.h libopenshot-0.2.5+dfsg1/include/Clip.h --- libopenshot-0.2.2+dfsg1/include/Clip.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Clip.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Clip class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,15 +31,9 @@ #ifndef OPENSHOT_CLIP_H #define OPENSHOT_CLIP_H -/// Do not include the juce unittest headers, because it collides with unittest++ -#ifndef __JUCE_UNITTEST_JUCEHEADER__ - #define __JUCE_UNITTEST_JUCEHEADER__ -#endif - #include #include #include -#include "JuceLibraryCode/JuceHeader.h" #include "AudioResampler.h" #include "ClipBase.h" #include "Color.h" @@ -44,21 +41,10 @@ #include "EffectBase.h" #include "Effects.h" #include "EffectInfo.h" -#include "FFmpegReader.h" #include "Fraction.h" -#include "FrameMapper.h" -#ifdef USE_IMAGEMAGICK - #include "ImageReader.h" - #include "TextReader.h" -#endif -#include "QtImageReader.h" -#include "ChunkReader.h" #include "KeyFrame.h" #include "ReaderBase.h" -#include "DummyReader.h" - -using namespace std; -using namespace openshot; +#include "JuceHeader.h" namespace openshot { @@ -66,7 +52,7 @@ /// from lowest layer to top layer (since that is sequence clips are combined), and then by /// position, and then by effect order. 
struct CompareClipEffects{ - bool operator()( EffectBase* lhs, EffectBase* rhs){ + bool operator()( openshot::EffectBase* lhs, openshot::EffectBase* rhs){ if( lhs->Layer() < rhs->Layer() ) return true; if( lhs->Layer() == rhs->Layer() && lhs->Position() < rhs->Position() ) return true; if( lhs->Layer() == rhs->Layer() && lhs->Position() == rhs->Position() && lhs->Order() > rhs->Order() ) return true; @@ -85,7 +71,7 @@ * Clip c1(new ImageReader("MyAwesomeLogo.jpeg")); * Clip c2(new FFmpegReader("BackgroundVideo.webm")); * - * // CLIP 1 (logo) - Set some clip properties (with Keyframes) + * // CLIP 1 (logo) - Set some clip properties (with openshot::Keyframes) * c1.Position(0.0); // Set the position or location (in seconds) on the timeline * c1.gravity = GRAVITY_LEFT; // Set the alignment / gravity of the clip (position on the screen) * c1.scale = SCALE_CROP; // Set the scale mode (how the image is resized to fill the screen) @@ -96,7 +82,7 @@ * c1.alpha.AddPoint(500, 0.0); // Keep the alpha transparent until frame #500 * c1.alpha.AddPoint(565, 1.0); // Animate the alpha from transparent to visible (between frame #501 and #565) * - * // CLIP 2 (background video) - Set some clip properties (with Keyframes) + * // CLIP 2 (background video) - Set some clip properties (with openshot::Keyframes) * c2.Position(0.0); // Set the position or location (in seconds) on the timeline * c2.Start(10.0); // Set the starting position of the video (trim the left side of the video) * c2.Layer(0); // Set the layer of the timeline (higher layers cover up images of lower layers) @@ -106,37 +92,40 @@ * c2.alpha.AddPoint(384, 1.0); // Animate the alpha to visible (between frame #360 and frame #384) * @endcode */ - class Clip : public ClipBase { + class Clip : public openshot::ClipBase { protected: /// Section lock for multiple threads - CriticalSection getFrameCriticalSection; + juce::CriticalSection getFrameCriticalSection; private: bool waveform; ///< Should a waveform be used instead of the clip's image - list effects; /// effects; /// apply_effects(std::shared_ptr frame); + std::shared_ptr apply_effects(std::shared_ptr frame); /// Get file extension - string get_file_extension(string path); + std::string get_file_extension(std::string path); /// Get a frame object or create a blank one - std::shared_ptr GetOrCreateFrame(int64_t number); + std::shared_ptr GetOrCreateFrame(int64_t number); /// Adjust the audio and image of a time mapped frame - std::shared_ptr get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number); + void get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number); /// Init default settings for a clip void init_settings(); @@ -151,117 +140,117 @@ void reverse_buffer(juce::AudioSampleBuffer* buffer); public: - GravityType gravity; ///< The gravity of a clip determines where it snaps to it's parent - ScaleType scale; ///< The scale determines how a clip should be resized to fit it's parent - AnchorType anchor; ///< The anchor determines what parent a clip should snap to - FrameDisplayType display; ///< The format to display the frame number (if any) - VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips + openshot::GravityType gravity; ///< The gravity of a clip determines where it snaps to its parent + openshot::ScaleType scale; ///< The scale determines how a clip should be resized to fit its parent + openshot::AnchorType anchor; ///< The anchor determines what parent a clip should snap to + openshot::FrameDisplayType display; ///< The 
format to display the frame number (if any) + openshot::VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips /// Default Constructor Clip(); /// @brief Constructor with filepath (reader is automatically created... by guessing file extensions) /// @param path The path of a reader (video file, image file, etc...). The correct reader will be used automatically. - Clip(string path); + Clip(std::string path); /// @brief Constructor with reader /// @param new_reader The reader to be used by this clip - Clip(ReaderBase* new_reader); + Clip(openshot::ReaderBase* new_reader); /// Destructor - ~Clip(); + virtual ~Clip(); /// @brief Add an effect to the clip /// @param effect Add an effect to the clip. An effect can modify the audio or video of an openshot::Frame. - void AddEffect(EffectBase* effect); + void AddEffect(openshot::EffectBase* effect); /// Close the internal reader void Close(); /// Return the list of effects on the timeline - list Effects() { return effects; }; + std::list Effects() { return effects; }; /// @brief Get an openshot::Frame object for a specific frame number of this timeline. /// /// @returns The requested frame (containing the image) /// @param requested_frame The frame number that is requested - std::shared_ptr GetFrame(int64_t requested_frame); + std::shared_ptr GetFrame(int64_t requested_frame); /// Open the internal reader void Open(); /// @brief Set the current reader /// @param new_reader The reader to be used by this clip - void Reader(ReaderBase* new_reader); + void Reader(openshot::ReaderBase* new_reader); /// Get the current reader - ReaderBase* Reader(); + openshot::ReaderBase* Reader(); /// Override End() method - float End(); ///< Get end position (in seconds) of clip (trim end of video), which can be affected by the time curve. + float End() const; ///< Get end position (in seconds) of clip (trim end of video), which can be affected by the time curve. void End(float value) { end = value; } ///< Set end position (in seconds) of clip (trim end of video) /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; /// @brief Remove an effect from the clip /// @param effect Remove an effect from the clip. 
- void RemoveEffect(EffectBase* effect); + void RemoveEffect(openshot::EffectBase* effect); /// Waveform property bool Waveform() { return waveform; } ///< Get the waveform property of this clip void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip // Scale and Location curves - Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1) - Keyframe scale_y; ///< Curve representing the vertical scaling in percent (0 to 1) - Keyframe location_x; ///< Curve representing the relative X position in percent based on the gravity (-1 to 1) - Keyframe location_y; ///< Curve representing the relative Y position in percent based on the gravity (-1 to 1) + openshot::Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1) + openshot::Keyframe scale_y; ///< Curve representing the vertical scaling in percent (0 to 1) + openshot::Keyframe location_x; ///< Curve representing the relative X position in percent based on the gravity (-1 to 1) + openshot::Keyframe location_y; ///< Curve representing the relative Y position in percent based on the gravity (-1 to 1) // Alpha and Rotation curves - Keyframe alpha; ///< Curve representing the alpha (1 to 0) - Keyframe rotation; ///< Curve representing the rotation (0 to 360) + openshot::Keyframe alpha; ///< Curve representing the alpha (1 to 0) + openshot::Keyframe rotation; ///< Curve representing the rotation (0 to 360) // Time and Volume curves - Keyframe time; ///< Curve representing the frames over time to play (used for speed and direction of video) - Keyframe volume; ///< Curve representing the volume (0 to 1) + openshot::Keyframe time; ///< Curve representing the frames over time to play (used for speed and direction of video) + openshot::Keyframe volume; ///< Curve representing the volume (0 to 1) /// Curve representing the color of the audio wave form - Color wave_color; + openshot::Color wave_color; // Crop settings and curves - GravityType crop_gravity; ///< Cropping needs to have a gravity to determine what side we are cropping - Keyframe crop_width; ///< Curve representing width in percent (0.0=0%, 1.0=100%) - Keyframe crop_height; ///< Curve representing height in percent (0.0=0%, 1.0=100%) - Keyframe crop_x; ///< Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%) - Keyframe crop_y; ///< Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%) + openshot::GravityType crop_gravity; ///< Cropping needs to have a gravity to determine what side we are cropping + openshot::Keyframe crop_width; ///< Curve representing width in percent (0.0=0%, 1.0=100%) + openshot::Keyframe crop_height; ///< Curve representing height in percent (0.0=0%, 1.0=100%) + openshot::Keyframe crop_x; ///< Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%) + openshot::Keyframe crop_y; ///< Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%) // Shear and perspective curves - Keyframe shear_x; ///< Curve representing X shear angle in degrees (-45.0=left, 45.0=right) - Keyframe shear_y; ///< Curve representing Y shear angle in degrees (-45.0=down, 45.0=up) - Keyframe perspective_c1_x; ///< Curves representing X for coordinate 1 - Keyframe perspective_c1_y; ///< Curves representing Y for coordinate 1 - Keyframe perspective_c2_x; ///< Curves representing X for coordinate 2 - Keyframe perspective_c2_y; ///< Curves representing Y for coordinate 2 - Keyframe perspective_c3_x; ///< Curves representing X for coordinate 3 - Keyframe 
perspective_c3_y; ///< Curves representing Y for coordinate 3 - Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4 - Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4 + openshot::Keyframe shear_x; ///< Curve representing X shear angle in degrees (-45.0=left, 45.0=right) + openshot::Keyframe shear_y; ///< Curve representing Y shear angle in degrees (-45.0=down, 45.0=up) + openshot::Keyframe perspective_c1_x; ///< Curves representing X for coordinate 1 + openshot::Keyframe perspective_c1_y; ///< Curves representing Y for coordinate 1 + openshot::Keyframe perspective_c2_x; ///< Curves representing X for coordinate 2 + openshot::Keyframe perspective_c2_y; ///< Curves representing Y for coordinate 2 + openshot::Keyframe perspective_c3_x; ///< Curves representing X for coordinate 3 + openshot::Keyframe perspective_c3_y; ///< Curves representing Y for coordinate 3 + openshot::Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4 + openshot::Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4 /// Audio channel filter and mappings - Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels) - Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel) + openshot::Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels) + openshot::Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel) /// Override has_video and has_audio properties of clip (and their readers) - Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes) - Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes) + openshot::Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes) + openshot::Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes) }; diff -Nru libopenshot-0.2.2+dfsg1/include/Color.h libopenshot-0.2.5+dfsg1/include/Color.h --- libopenshot-0.2.2+dfsg1/include/Color.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Color.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Color class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -42,34 +45,34 @@ class Color{ public: - Keyframe red; /// * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -33,8 +36,6 @@ #include "Fraction.h" #include "Json.h" -using namespace std; - namespace openshot { /** @@ -52,11 +53,6 @@ * \endcode */ class Coordinate { - private: - bool increasing; ///< Is the Y value increasing or decreasing? 
- Fraction repeated; ///< Fraction of repeated Y values (for example, 1/3 would be the first Y value of 3 repeated values) - double delta; ///< This difference in Y value (from the previous unique Y value) - public: double X; ///< The X value of the coordinate (usually representing the frame #) double Y; ///< The Y value of the coordinate (usually representing the value of the property being animated) @@ -69,32 +65,11 @@ /// @param y The Y coordinate (usually representing the value of the property being animated) Coordinate(double x, double y); - /// @brief Set the repeating Fraction (used internally on the timeline, to track changes to coordinates) - /// @param is_repeated The fraction representing how many times this coordinate Y value repeats (only used on the timeline) - void Repeat(Fraction is_repeated) { repeated=is_repeated; } - - /// Get the repeating Fraction (used internally on the timeline, to track changes to coordinates) - Fraction Repeat() { return repeated; } - - /// @brief Set the increasing flag (used internally on the timeline, to track changes to coordinates) - /// @param is_increasing Indicates if this coordinate Y value is increasing (when compared to the previous coordinate) - void IsIncreasing(bool is_increasing) { increasing = is_increasing; } - - /// Get the increasing flag (used internally on the timeline, to track changes to coordinates) - bool IsIncreasing() { return increasing; } - - /// @brief Set the delta / difference between previous coordinate value (used internally on the timeline, to track changes to coordinates) - /// @param new_delta Indicates how much this Y value differs from the previous Y value - void Delta(double new_delta) { delta=new_delta; } - - /// Get the delta / difference between previous coordinate value (used internally on the timeline, to track changes to coordinates) - float Delta() { return delta; } - /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJson(string value); ///< Load JSON string into this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const; ///< Generate JSON string of this object + Json::Value JsonValue() const; ///< Generate Json::Value for this object + void SetJson(const std::string value); ///< Load JSON string into this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object }; } diff -Nru libopenshot-0.2.2+dfsg1/include/CrashHandler.h libopenshot-0.2.5+dfsg1/include/CrashHandler.h --- libopenshot-0.2.2+dfsg1/include/CrashHandler.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/CrashHandler.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for CrashHandler class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/DecklinkInput.h libopenshot-0.2.5+dfsg1/include/DecklinkInput.h --- libopenshot-0.2.2+dfsg1/include/DecklinkInput.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/DecklinkInput.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,7 +3,10 @@ * @brief Header file for DecklinkInput class * @author Jonathan Thomas , Blackmagic Design * - * @section LICENSE + * @ref License + */ + +/* LICENSE * * Copyright (c) 2009 Blackmagic Design * @@ -30,7 +33,7 @@ * DEALINGS IN THE SOFTWARE. * * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -76,7 +79,7 @@ unsigned long final_frameCount; // Queue of raw video frames - deque raw_video_frames; + std::deque raw_video_frames; openshot::CacheMemory final_frames; // Convert between YUV and RGB diff -Nru libopenshot-0.2.2+dfsg1/include/DecklinkOutput.h libopenshot-0.2.5+dfsg1/include/DecklinkOutput.h --- libopenshot-0.2.2+dfsg1/include/DecklinkOutput.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/DecklinkOutput.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,7 +3,10 @@ * @brief Header file for DecklinkOutput class * @author Jonathan Thomas , Blackmagic Design * - * @section LICENSE + * @ref License + */ + +/* LICENSE * * Copyright (c) 2009 Blackmagic Design * @@ -30,7 +33,7 @@ * DEALINGS IN THE SOFTWARE. * * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -90,14 +93,14 @@ unsigned long frameCount; //map temp_cache; - map temp_cache; + std::map temp_cache; BMDTimeValue frameRateDuration, frameRateScale; // Queue of raw video frames //deque final_frames; - deque final_frames; - deque > raw_video_frames; + std::deque final_frames; + std::deque > raw_video_frames; // Convert between YUV and RGB IDeckLinkOutput *deckLinkOutput; diff -Nru libopenshot-0.2.2+dfsg1/include/DecklinkReader.h libopenshot-0.2.5+dfsg1/include/DecklinkReader.h --- libopenshot-0.2.2+dfsg1/include/DecklinkReader.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/DecklinkReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for DecklinkReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -47,8 +50,6 @@ #include "Frame.h" #include "DecklinkInput.h" -using namespace std; - namespace openshot { @@ -114,13 +115,13 @@ bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "DecklinkReader"; }; + std::string Name() { return "DecklinkReader"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open device and video stream - which is called by the constructor automatically void Open(); diff -Nru libopenshot-0.2.2+dfsg1/include/DecklinkWriter.h libopenshot-0.2.5+dfsg1/include/DecklinkWriter.h --- libopenshot-0.2.2+dfsg1/include/DecklinkWriter.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/DecklinkWriter.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for DecklinkWriter class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -47,8 +50,6 @@ #include "Frame.h" #include "DecklinkOutput.h" -using namespace std; - namespace openshot { diff -Nru libopenshot-0.2.2+dfsg1/include/DummyReader.h libopenshot-0.2.5+dfsg1/include/DummyReader.h --- libopenshot-0.2.2+dfsg1/include/DummyReader.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/DummyReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for DummyReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -40,8 +43,6 @@ #include "Exceptions.h" #include "Fraction.h" -using namespace std; - namespace openshot { /** @@ -53,7 +54,7 @@ class DummyReader : public ReaderBase { private: - std::shared_ptr image_frame; + std::shared_ptr image_frame; bool is_open; public: @@ -62,7 +63,9 @@ DummyReader(); /// Constructor for DummyReader. - DummyReader(Fraction fps, int width, int height, int sample_rate, int channels, float duration); + DummyReader(openshot::Fraction fps, int width, int height, int sample_rate, int channels, float duration); + + virtual ~DummyReader(); /// Close File void Close(); @@ -75,19 +78,19 @@ /// /// @returns The requested frame (containing the image) /// @param requested_frame The frame number that is requested. 
- std::shared_ptr GetFrame(int64_t requested_frame); + std::shared_ptr GetFrame(int64_t requested_frame); /// Determine if reader is open or closed bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "DummyReader"; }; + std::string Name() { return "DummyReader"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open File - which is called by the constructor automatically void Open(); diff -Nru libopenshot-0.2.2+dfsg1/include/EffectBase.h libopenshot-0.2.5+dfsg1/include/EffectBase.h --- libopenshot-0.2.2+dfsg1/include/EffectBase.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/EffectBase.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for EffectBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -32,10 +35,8 @@ #include #include #include "ClipBase.h" -#include "Frame.h" #include "Json.h" - -using namespace std; +#include "Frame.h" namespace openshot { @@ -48,10 +49,9 @@ */ struct EffectInfoStruct { - string class_name; ///< The class name of the effect - string short_name; ///< A short name of the effect, commonly used for icon names, etc... - string name; ///< The name of the effect - string description; ///< The description of this effect and what it does + std::string class_name; ///< The class name of the effect + std::string name; ///< The name of the effect + std::string description; ///< The description of this effect and what it does bool has_video; ///< Determines if this effect manipulates the image of a frame bool has_audio; ///< Determines if this effect manipulates the audio of a frame }; @@ -82,29 +82,30 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). /// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it /// @param frame_number The frame number (starting at 1) of the effect on the timeline. - virtual std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) = 0; + virtual std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) = 0; /// Initialize the values of the EffectInfo struct. It is important for derived classes to call /// this method, or the EffectInfo struct values will not be initialized. 
void InitEffectInfo(); /// Get and Set JSON methods - virtual string Json() = 0; ///< Generate JSON string of this object - virtual void SetJson(string value) = 0; ///< Load JSON string into this object - virtual Json::Value JsonValue() = 0; ///< Generate Json::JsonValue for this object - virtual void SetJsonValue(Json::Value root) = 0; ///< Load Json::JsonValue into this object - Json::Value JsonInfo(); ///< Generate JSON object of meta data / info + virtual std::string Json() const = 0; ///< Generate JSON string of this object + virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object + virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object + virtual void SetJsonValue(const Json::Value root) = 0; ///< Load Json::Value into this object + Json::Value JsonInfo() const; ///< Generate JSON object of meta data / info /// Get the order that this effect should be executed. - int Order() { return order; } + int Order() const { return order; } /// Set the order that this effect should be executed. void Order(int new_order) { order = new_order; } + virtual ~EffectBase() = default; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/EffectInfo.h libopenshot-0.2.5+dfsg1/include/EffectInfo.h --- libopenshot-0.2.2+dfsg1/include/EffectInfo.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/EffectInfo.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for the EffectInfo class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -31,8 +34,6 @@ #include "Effects.h" -using namespace std; - namespace openshot { @@ -46,11 +47,11 @@ { public: // Create an instance of an effect (factory style) - EffectBase* CreateEffect(string effect_type); + EffectBase* CreateEffect(std::string effect_type); /// JSON methods - static string Json(); ///< Generate JSON string of this object - static Json::Value JsonValue(); ///< Generate Json::JsonValue for this object + static std::string Json(); ///< Generate JSON string of this object + static Json::Value JsonValue(); ///< Generate Json::Value for this object }; diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Bars.h libopenshot-0.2.5+dfsg1/include/effects/Bars.h --- libopenshot-0.2.2+dfsg1/include/effects/Bars.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Bars.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Bars effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -38,8 +41,6 @@ #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -80,7 +81,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -88,14 +89,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Blur.h libopenshot-0.2.5+dfsg1/include/effects/Blur.h --- libopenshot-0.2.2+dfsg1/include/effects/Blur.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Blur.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Blur effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -46,8 +49,6 @@ #include "../QtImageReader.h" #include "../ChunkReader.h" -using namespace std; - namespace openshot { @@ -66,7 +67,6 @@ void init_effect_details(); /// Internal blur methods (inspired and credited to http://blog.ivank.net/fastest-gaussian-blur.html) - int* initBoxes(float sigma, int n); void boxBlurH(unsigned char *scl, unsigned char *tcl, int w, int h, int r); void boxBlurT(unsigned char *scl, unsigned char *tcl, int w, int h, int r); @@ -93,7 +93,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -101,14 +101,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Brightness.h libopenshot-0.2.5+dfsg1/include/effects/Brightness.h --- libopenshot-0.2.2+dfsg1/include/effects/Brightness.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Brightness.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Brightness class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -45,8 +48,6 @@ #include "../QtImageReader.h" #include "../ChunkReader.h" -using namespace std; - namespace openshot { @@ -72,15 +73,15 @@ /// Default constructor, which takes 2 curves. The curves adjust the brightness and // contrast of a frame's image. /// - /// @param new_brightness The curve to adjust the brightness (between 100 and -100) - /// @param new_contrast The curve to adjust the contrast (3 is typical, 20 is a lot, 0 is invalid) + /// @param new_brightness The curve to adjust the brightness (from -1 to +1, 0 is default/"off") + /// @param new_contrast The curve to adjust the contrast (3 is typical, 20 is a lot, 100 is max. 0 is invalid) Brightness(Keyframe new_brightness, Keyframe new_contrast); /// @brief This method is required for all derived classes of EffectBase, and returns a /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
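The Brightness constructor documentation above now pins down the ranges: brightness runs from -1 to +1 with 0 meaning no change, and contrast accepts values up to 100, with 3 typical and 0 invalid. A construction sketch, assuming Keyframe is default-constructible, has a single-value constructor, and exposes AddPoint(x, y):

#include "KeyFrame.h"              // assumed include paths
#include "effects/Brightness.h"

openshot::Brightness make_fade_to_bright() {
    // Brightness: -1.0 (all black) .. +1.0 (all white), 0.0 = unchanged
    openshot::Keyframe brightness;       // assumed default constructor
    brightness.AddPoint(1, 0.0);         // frame 1: no adjustment
    brightness.AddPoint(100, 0.5);       // frame 100: noticeably brighter

    // Contrast: 0 is invalid, 3 is typical, 20 is a lot, 100 is the maximum
    openshot::Keyframe contrast(3.0);    // assumed single-value constructor

    return openshot::Brightness(brightness, contrast);
}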
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -88,14 +89,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/ChromaKey.h libopenshot-0.2.5+dfsg1/include/effects/ChromaKey.h --- libopenshot-0.2.2+dfsg1/include/effects/ChromaKey.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/ChromaKey.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ChromaKey class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -40,8 +43,6 @@ #include "../Exceptions.h" #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -77,7 +78,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
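PropertiesJSON() is const in all of these effect headers, so a UI layer can poll a read-only effect for the state of every animatable property at a given frame. A small sketch; the const declaration on EffectBase itself is inferred from the override specifiers above.

#include <cstdint>
#include <iostream>

#include "EffectBase.h"   // assumed include path

void dump_properties(const openshot::EffectBase& effect, int64_t first, int64_t last) {
    for (int64_t frame = first; frame <= last; ++frame) {
        // One JSON document per frame describing every property's current value
        std::cout << "frame " << frame << ": " << effect.PropertiesJSON(frame) << "\n";
    }
}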
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -85,13 +86,13 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object // Get all properties for a specific frame - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/ColorShift.h libopenshot-0.2.5+dfsg1/include/effects/ColorShift.h --- libopenshot-0.2.2+dfsg1/include/effects/ColorShift.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/ColorShift.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Color Shift effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -37,8 +40,6 @@ #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -84,7 +85,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -92,14 +93,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Crop.h libopenshot-0.2.5+dfsg1/include/effects/Crop.h --- libopenshot-0.2.2+dfsg1/include/effects/Crop.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Crop.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Crop effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -38,8 +41,6 @@ #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -69,7 +70,6 @@ /// Default constructor, which takes 4 curves. These curves animate the crop over time. /// - /// @param color The curve to adjust the color of bars /// @param left The curve to adjust the left bar size (between 0 and 1) /// @param top The curve to adjust the top bar size (between 0 and 1) /// @param right The curve to adjust the right bar size (between 0 and 1) @@ -80,7 +80,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
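The Crop documentation above describes four 0-to-1 curves that animate the bar sizes over time. A construction sketch; the Crop(left, top, right, bottom) parameter order and the Keyframe API used here are assumptions, since the constructor signature itself is not shown in this hunk.

#include <initializer_list>

#include "KeyFrame.h"         // assumed include paths
#include "effects/Crop.h"

openshot::Crop make_closing_crop() {
    openshot::Keyframe left, top, right, bottom;
    for (openshot::Keyframe* curve : {&left, &top, &right, &bottom}) {
        curve->AddPoint(1, 0.0);     // frame 1: bar covers nothing
        curve->AddPoint(120, 0.1);   // frame 120: bar covers 10% of that edge
    }
    // Assumed parameter order: left, top, right, bottom
    return openshot::Crop(left, top, right, bottom);
}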
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -88,14 +88,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Deinterlace.h libopenshot-0.2.5+dfsg1/include/effects/Deinterlace.h --- libopenshot-0.2.2+dfsg1/include/effects/Deinterlace.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Deinterlace.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for De-interlace class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -41,8 +44,6 @@ #include "../Json.h" #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -73,7 +74,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -81,13 +82,13 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object // Get all properties for a specific frame - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Hue.h libopenshot-0.2.5+dfsg1/include/effects/Hue.h --- libopenshot-0.2.2+dfsg1/include/effects/Hue.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Hue.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Hue effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -37,8 +40,6 @@ #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -70,7 +71,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -78,14 +79,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Mask.h libopenshot-0.2.5+dfsg1/include/effects/Mask.h --- libopenshot-0.2.2+dfsg1/include/effects/Mask.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Mask.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Mask class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -25,8 +28,8 @@ * along with OpenShot Library. If not, see . */ -#ifndef OPENSHOT_WIPE_EFFECT_H -#define OPENSHOT_WIPE_EFFECT_H +#ifndef OPENSHOT_MASK_EFFECT_H +#define OPENSHOT_MASK_EFFECT_H #include "../EffectBase.h" @@ -45,16 +48,15 @@ #include "../QtImageReader.h" #include "../ChunkReader.h" #ifdef USE_IMAGEMAGICK + #include "../MagickUtilities.h" #include "../ImageReader.h" #endif -using namespace std; - namespace openshot { /** - * @brief This class uses the ImageMagick++ libraries, to apply alpha (or transparency) masks + * @brief This class uses the image libraries to apply alpha (or transparency) masks * to any frame. It can also be animated, and used as a powerful Wipe transition. * * These masks / wipes can also be combined, such as a transparency mask on top of a clip, which @@ -91,7 +93,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -99,14 +101,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; /// Get the reader object of the mask grayscale image ReaderBase* Reader() { return reader; }; diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Negate.h libopenshot-0.2.5+dfsg1/include/effects/Negate.h --- libopenshot-0.2.2+dfsg1/include/effects/Negate.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Negate.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Negate class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -40,8 +43,6 @@ #include "../Exceptions.h" #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -61,7 +62,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
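Mask's documentation above now describes it as using "the image libraries" (ImageMagick stays optional behind USE_IMAGEMAGICK) to turn a grayscale image into an alpha mask or animated wipe, and the header exposes the backing reader through Reader(). A heavily hedged sketch: the Mask(reader, brightness, contrast) constructor, QtImageReader, and the exact meaning of the brightness range are assumptions not confirmed by this hunk.

#include "KeyFrame.h"          // assumed include paths
#include "QtImageReader.h"
#include "effects/Mask.h"

void build_wipe(openshot::QtImageReader& wipe_image) {
    // Animate the mask brightness so the grayscale image sweeps the wipe
    openshot::Keyframe brightness;
    brightness.AddPoint(1, 1.0);     // assumed: fully "on" at the start
    brightness.AddPoint(60, -1.0);   // assumed: swept to the other extreme by frame 60
    openshot::Keyframe contrast(3.0);

    openshot::Mask wipe(&wipe_image, brightness, contrast);   // assumed constructor
    openshot::ReaderBase* mask_source = wipe.Reader();         // getter from the header
    (void)mask_source;
}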
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -69,13 +70,13 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object // Get all properties for a specific frame - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Pixelate.h libopenshot-0.2.5+dfsg1/include/effects/Pixelate.h --- libopenshot-0.2.2+dfsg1/include/effects/Pixelate.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Pixelate.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Pixelate effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -38,8 +41,6 @@ #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -79,7 +80,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -87,14 +88,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Saturation.h libopenshot-0.2.5+dfsg1/include/effects/Saturation.h --- libopenshot-0.2.2+dfsg1/include/effects/Saturation.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Saturation.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Saturation class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -45,8 +48,6 @@ #include "../QtImageReader.h" #include "../ChunkReader.h" -using namespace std; - namespace openshot { @@ -77,7 +78,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -85,14 +86,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Shift.h libopenshot-0.2.5+dfsg1/include/effects/Shift.h --- libopenshot-0.2.2+dfsg1/include/effects/Shift.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Shift.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Shift effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -37,8 +40,6 @@ #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -73,7 +74,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -81,14 +82,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/effects/Wave.h libopenshot-0.2.5+dfsg1/include/effects/Wave.h --- libopenshot-0.2.2+dfsg1/include/effects/Wave.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/effects/Wave.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Wave effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -37,8 +40,6 @@ #include "../KeyFrame.h" -using namespace std; - namespace openshot { @@ -79,7 +80,7 @@ /// modified openshot::Frame object /// /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from it's keyframes (starting at 1). + /// tells the effect which settings to use from its keyframes (starting at 1). 
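Across these effect headers the JSON accessors are now JsonValue() const and SetJsonValue(const Json::Value), which makes a tree-based round trip straightforward. A sketch using Brightness; the key names inside the returned tree are effect-specific and not asserted here, and the include path is an assumption.

#include "effects/Brightness.h"   // assumed include path; pulls in Json::Value

void round_trip(openshot::Brightness& effect) {
    Json::Value root = effect.JsonValue();   // const: full description of the effect
    // ... inspect or edit the tree here; key names depend on the effect ...
    effect.SetJsonValue(root);               // load the (possibly edited) tree back
}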
/// /// @returns The modified openshot::Frame object /// @param frame The frame object that needs the effect applied to it @@ -87,14 +88,14 @@ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Get all properties for a specific frame (perfect for a UI to display the current state /// of all properties at any time) - string PropertiesJSON(int64_t requested_frame); + std::string PropertiesJSON(int64_t requested_frame) const override; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/Effects.h libopenshot-0.2.5+dfsg1/include/Effects.h --- libopenshot-0.2.2+dfsg1/include/Effects.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Effects.h 2020-03-03 08:00:06.000000000 +0000 @@ -6,9 +6,12 @@ * @brief This header includes all commonly used effects for libopenshot, for ease-of-use. * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/Enums.h libopenshot-0.2.5+dfsg1/include/Enums.h --- libopenshot-0.2.2+dfsg1/include/Enums.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Enums.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for TextReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/Exceptions.h libopenshot-0.2.5+dfsg1/include/Exceptions.h --- libopenshot-0.2.2+dfsg1/include/Exceptions.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Exceptions.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for all Exception classes * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -29,7 +32,6 @@ #define OPENSHOT_EXCEPTIONS_H #include -using namespace std; namespace openshot { @@ -42,11 +44,11 @@ class BaseException : public std::exception //: public exception { protected: - string m_message; + std::string m_message; public: - BaseException(string message) : m_message(message) { } - virtual ~BaseException() throw () {} - virtual const char* what() const throw () { + BaseException(std::string message) : m_message(message) { } + virtual ~BaseException() noexcept {} + virtual const char* what() const noexcept { // return custom message return m_message.c_str(); } @@ -56,13 +58,20 @@ class ChunkNotFound : public BaseException { public: - string file_path; int64_t frame_number; int64_t chunk_number; int64_t chunk_frame; - ChunkNotFound(string message, int64_t frame_number, int64_t chunk_number, int64_t chunk_frame) + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param frame_number The frame number being processed + * @param chunk_number The chunk requested + * @param chunk_frame The chunk frame + */ + ChunkNotFound(std::string message, int64_t frame_number, int64_t chunk_number, int64_t chunk_frame) : BaseException(message), frame_number(frame_number), chunk_number(chunk_number), chunk_frame(chunk_frame) { } - virtual ~ChunkNotFound() throw () {} + virtual ~ChunkNotFound() noexcept {} }; @@ -70,132 +79,206 @@ class DecklinkError : public BaseException { public: - DecklinkError(string message) + /** + * @brief Constructor + * + * @param message A message to accompany the exception + */ + DecklinkError(std::string message) : BaseException(message) { } - virtual ~DecklinkError() throw () {} + virtual ~DecklinkError() noexcept {} }; /// Exception when decoding audio packet class ErrorDecodingAudio : public BaseException { public: - string file_path; int64_t frame_number; - ErrorDecodingAudio(string message, int64_t frame_number) + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param frame_number The frame number being processed + */ + ErrorDecodingAudio(std::string message, int64_t frame_number) : BaseException(message), frame_number(frame_number) { } - virtual ~ErrorDecodingAudio() throw () {} + virtual ~ErrorDecodingAudio() noexcept {} }; /// Exception when encoding audio packet class ErrorEncodingAudio : public BaseException { public: - string file_path; int64_t frame_number; - ErrorEncodingAudio(string message, int64_t frame_number) + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param frame_number The frame number being processed + */ + ErrorEncodingAudio(std::string message, int64_t frame_number) : BaseException(message), frame_number(frame_number) { } - virtual ~ErrorEncodingAudio() throw () {} + virtual ~ErrorEncodingAudio() noexcept {} }; /// Exception when encoding audio packet class ErrorEncodingVideo : public BaseException { public: - string file_path; int64_t frame_number; - ErrorEncodingVideo(string message, int64_t frame_number) + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param frame_number The frame number being processed + */ + ErrorEncodingVideo(std::string message, int64_t frame_number) : BaseException(message), frame_number(frame_number) { } - virtual ~ErrorEncodingVideo() throw () {} + virtual 
~ErrorEncodingVideo() noexcept {} }; /// Exception when an invalid # of audio channels are detected class InvalidChannels : public BaseException { public: - string file_path; - InvalidChannels(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + InvalidChannels(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~InvalidChannels() throw () {} + virtual ~InvalidChannels() noexcept {} }; /// Exception when no valid codec is found for a file class InvalidCodec : public BaseException { public: - string file_path; - InvalidCodec(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + InvalidCodec(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~InvalidCodec() throw () {} + virtual ~InvalidCodec() noexcept {} }; /// Exception for files that can not be found or opened class InvalidFile : public BaseException { public: - string file_path; - InvalidFile(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path The input file being processed + */ + InvalidFile(std::string message, std::string file_path) : BaseException(message), file_path(file_path) { } - virtual ~InvalidFile() throw () {} + virtual ~InvalidFile() noexcept {} }; /// Exception when no valid format is found for a file class InvalidFormat : public BaseException { public: - string file_path; - InvalidFormat(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + InvalidFormat(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~InvalidFormat() throw () {} + virtual ~InvalidFormat() noexcept {} }; /// Exception for invalid JSON class InvalidJSON : public BaseException { public: - string file_path; - InvalidJSON(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + InvalidJSON(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~InvalidJSON() throw () {} + virtual ~InvalidJSON() noexcept {} }; /// Exception when invalid encoding options are used class InvalidOptions : public BaseException { public: - string file_path; - InvalidOptions(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + InvalidOptions(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~InvalidOptions() throw () {} + virtual ~InvalidOptions() noexcept {} }; /// Exception when invalid sample rate is detected during encoding class InvalidSampleRate : public BaseException { public: - string file_path; - InvalidSampleRate(string message, string file_path) + std::string file_path; + /** 
+ * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + InvalidSampleRate(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~InvalidSampleRate() throw () {} + virtual ~InvalidSampleRate() noexcept {} }; /// Exception for missing JSON Change key class InvalidJSONKey : public BaseException { public: - string json; - InvalidJSONKey(string message, string json) + std::string json; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param json The json data being processed + */ + InvalidJSONKey(std::string message, std::string json) : BaseException(message), json(json) { } - virtual ~InvalidJSONKey() throw () {} + virtual ~InvalidJSONKey() noexcept {} }; /// Exception when no streams are found in the file class NoStreamsFound : public BaseException { public: - string file_path; - NoStreamsFound(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + NoStreamsFound(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~NoStreamsFound() throw () {} + virtual ~NoStreamsFound() noexcept {} }; /// Exception for frames that are out of bounds. @@ -204,9 +287,16 @@ public: int64_t FrameRequested; int64_t MaxFrames; - OutOfBoundsFrame(string message, int64_t frame_requested, int64_t max_frames) + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param frame_requested The out-of-bounds frame number requested + * @param max_frames The maximum available frame number + */ + OutOfBoundsFrame(std::string message, int64_t frame_requested, int64_t max_frames) : BaseException(message), FrameRequested(frame_requested), MaxFrames(max_frames) { } - virtual ~OutOfBoundsFrame() throw () {} + virtual ~OutOfBoundsFrame() noexcept {} }; /// Exception for an out of bounds key-frame point. 
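The exception hierarchy above now takes std::string messages, marks destructors and what() noexcept, and gives most file-related exceptions an optional file_path argument. A catch-site sketch; FFmpegReader's throwing constructor is documented later in this diff, and the include paths are assumptions.

#include <iostream>
#include <string>

#include "Exceptions.h"     // assumed include paths
#include "FFmpegReader.h"

bool try_open(const std::string& path) {
    try {
        openshot::FFmpegReader reader(path);   // throws on unreadable input
        reader.Close();
        return true;
    } catch (const openshot::InvalidFile& e) {
        // file_path is a public member of the file-related exceptions
        std::cerr << "Cannot open " << e.file_path << ": " << e.what() << "\n";
    } catch (const openshot::BaseException& e) {
        std::cerr << "libopenshot error: " << e.what() << "\n";
    }
    return false;
}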
@@ -215,59 +305,96 @@ public: int PointRequested; int MaxPoints; - OutOfBoundsPoint(string message, int point_requested, int max_points) + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param point_requested The out-of-bounds point requested + * @param max_points The maximum available point value + */ + OutOfBoundsPoint(std::string message, int point_requested, int max_points) : BaseException(message), PointRequested(point_requested), MaxPoints(max_points) { } - virtual ~OutOfBoundsPoint() throw () {} + virtual ~OutOfBoundsPoint() noexcept {} }; /// Exception when memory could not be allocated class OutOfMemory : public BaseException { public: - string file_path; - OutOfMemory(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + OutOfMemory(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~OutOfMemory() throw () {} + virtual ~OutOfMemory() noexcept {} }; /// Exception when a reader is closed, and a frame is requested class ReaderClosed : public BaseException { public: - string file_path; - ReaderClosed(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + ReaderClosed(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~ReaderClosed() throw () {} + virtual ~ReaderClosed() noexcept {} }; /// Exception when resample fails class ResampleError : public BaseException { public: - string file_path; - ResampleError(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + ResampleError(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~ResampleError() throw () {} + virtual ~ResampleError() noexcept {} }; /// Exception when too many seek attempts happen class TooManySeeks : public BaseException { public: - string file_path; - TooManySeeks(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The input file being processed + */ + TooManySeeks(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~TooManySeeks() throw () {} + virtual ~TooManySeeks() noexcept {} }; /// Exception when a writer is closed, and a frame is requested class WriterClosed : public BaseException { public: - string file_path; - WriterClosed(string message, string file_path) + std::string file_path; + /** + * @brief Constructor + * + * @param message A message to accompany the exception + * @param file_path (optional) The output file being written + */ + WriterClosed(std::string message, std::string file_path="") : BaseException(message), file_path(file_path) { } - virtual ~WriterClosed() throw () {} + virtual ~WriterClosed() noexcept {} }; } diff -Nru libopenshot-0.2.2+dfsg1/include/FFmpegReader.h libopenshot-0.2.5+dfsg1/include/FFmpegReader.h --- libopenshot-0.2.2+dfsg1/include/FFmpegReader.h 2018-09-22 19:47:46.000000000 +0000 +++ 
libopenshot-0.2.5+dfsg1/include/FFmpegReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for FFmpegReader class * @author Jonathan Thomas , Fabrice Bellard * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2013 OpenShot Studios, LLC, Fabrice Bellard + * Copyright (c) 2008-2019 OpenShot Studios, LLC, Fabrice Bellard * (http://www.openshotstudios.com). This file is part of * OpenShot Library (http://www.openshot.org), an open-source project * dedicated to delivering high quality video editing and animation solutions @@ -42,24 +45,23 @@ #include #include #include "CacheMemory.h" +#include "Clip.h" #include "Exceptions.h" #include "OpenMPUtilities.h" +#include "Settings.h" -using namespace std; - -namespace openshot -{ +namespace openshot { /** * @brief This struct holds the associated video frame and starting sample # for an audio packet. * * Because audio packets do not match up with video frames, this helps determine exactly * where the audio packet's samples belong. */ - struct AudioLocation - { + struct AudioLocation { int64_t frame; int sample_start; + bool is_near(AudioLocation location, int samples_per_frame, int64_t amount); }; @@ -74,11 +76,11 @@ * * @code * // Create a reader for a video - * FFmpegReader r("MyAwesomeVideo.webm"); + * openshot::FFmpegReader r("MyAwesomeVideo.webm"); * r.Open(); // Open the reader * * // Get frame number 1 from the video - * std::shared_ptr f = r.GetFrame(1); + * std::shared_ptr f = r.GetFrame(1); * * // Now that we have an openshot::Frame object, lets have some fun! * f->Display(); // Display the frame on the screen @@ -89,14 +91,16 @@ * r.Close(); * @endcode */ - class FFmpegReader : public ReaderBase - { + class FFmpegReader : public ReaderBase { private: - string path; + std::string path; AVFormatContext *pFormatCtx; int i, videoStream, audioStream; AVCodecContext *pCodecCtx, *aCodecCtx; +#if HAVE_HW_ACCEL + AVBufferRef *hw_device_ctx = NULL; //PM +#endif AVStream *pStream, *aStream; AVPacket *packet; AVFrame *pFrame; @@ -105,19 +109,18 @@ bool check_interlace; bool check_fps; bool has_missing_frames; - bool use_omp_threads; CacheMemory working_cache; CacheMemory missing_frames; - map processing_video_frames; - multimap processing_audio_frames; - map processed_video_frames; - map processed_audio_frames; - multimap missing_video_frames; - multimap missing_video_frames_source; - multimap missing_audio_frames; - multimap missing_audio_frames_source; - map checked_frames; + std::map processing_video_frames; + std::multimap processing_audio_frames; + std::map processed_video_frames; + std::map processed_audio_frames; + std::multimap missing_video_frames; + std::multimap missing_video_frames_source; + std::multimap missing_audio_frames; + std::multimap missing_audio_frames_source; + std::map checked_frames; AudioLocation previous_packet_location; // DEBUG VARIABLES (FOR AUDIO ISSUES) @@ -127,7 +130,7 @@ int64_t pts_counter; int64_t num_packets_since_video_frame; int64_t num_checks_since_final; - std::shared_ptr last_video_frame; + std::shared_ptr last_video_frame; bool is_seeking; int64_t seeking_pts; @@ -141,7 +144,14 @@ int64_t video_pts_offset; int64_t last_frame; int64_t largest_frame_processed; - int64_t current_video_frame; // can't reliably use PTS of video to determine this + int64_t current_video_frame; // can't reliably use PTS of video to determine this + + int hw_de_supported = 0; // Is set by FFmpegReader +#if HAVE_HW_ACCEL + AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; 
+ AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_NONE; + int IsHardwareDecodeSupported(int codecid); +#endif /// Check for the correct frames per second value by scanning the 1st few seconds of video packets. void CheckFPS(); @@ -149,7 +159,7 @@ /// Check the current seek position and determine if we need to seek again bool CheckSeek(bool is_video); - /// Check if a frame is missing and attempt to replace it's frame image (and + /// Check if a frame is missing and attempt to replace its frame image (and bool CheckMissingFrame(int64_t requested_frame); /// Check the working queue, and move finished frames to the finished queue @@ -165,7 +175,7 @@ int64_t ConvertVideoPTStoFrame(int64_t pts); /// Create a new Frame (or return an existing one) and add it to the working queue. - std::shared_ptr CreateFrame(int64_t requested_frame); + std::shared_ptr CreateFrame(int64_t requested_frame); /// Calculate Starting video frame and sample # for an audio PTS AudioLocation GetAudioPTSLocation(int64_t pts); @@ -195,13 +205,13 @@ void ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample); /// Read the stream until we find the requested Frame - std::shared_ptr ReadStream(int64_t requested_frame); + std::shared_ptr ReadStream(int64_t requested_frame); - /// Remove AVFrame from cache (and deallocate it's memory) - void RemoveAVFrame(AVFrame*); + /// Remove AVFrame from cache (and deallocate its memory) + void RemoveAVFrame(AVFrame *); - /// Remove AVPacket from cache (and deallocate it's memory) - void RemoveAVPacket(AVPacket*); + /// Remove AVPacket from cache (and deallocate its memory) + void RemoveAVPacket(AVPacket *); /// Seek to a specific Frame. This is not always frame accurate, it's more of an estimation on many codecs. void Seek(int64_t requested_frame); @@ -225,39 +235,39 @@ /// Constructor for FFmpegReader. This automatically opens the media file and loads /// frame 1, or it throws one of the following exceptions. - FFmpegReader(string path); + FFmpegReader(std::string path); - /// Constructor for FFmpegReader. This only opens the media file to inspect it's properties + /// Constructor for FFmpegReader. This only opens the media file to inspect its properties /// if inspect_reader=true. When not inspecting the media file, it's much faster, and useful /// when you are inflating the object using JSON after instantiating it. - FFmpegReader(string path, bool inspect_reader); + FFmpegReader(std::string path, bool inspect_reader); /// Destructor - ~FFmpegReader(); + virtual ~FFmpegReader(); /// Close File void Close(); /// Get the cache object used by this reader - CacheMemory* GetCache() { return &final_cache; }; + CacheMemory *GetCache() { return &final_cache; }; /// Get a shared pointer to a openshot::Frame object for a specific frame number of this reader. /// /// @returns The requested frame of video /// @param requested_frame The frame number that is requested. 
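The reader usage shown in the doc comment above, written out with the template arguments that were elided from this listing (assumed to be openshot::Frame) and the same placeholder file name:

#include <memory>

#include "FFmpegReader.h"   // assumed include path

int main() {
    // Create a reader for a video
    openshot::FFmpegReader r("MyAwesomeVideo.webm");
    r.Open();   // Open the reader

    // Get frame number 1 from the video (frame numbers are 1-based)
    std::shared_ptr<openshot::Frame> f = r.GetFrame(1);
    f->Display();   // Display the frame on the screen

    // Close the reader
    r.Close();
    return 0;
}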
- std::shared_ptr GetFrame(int64_t requested_frame); + std::shared_ptr GetFrame(int64_t requested_frame); /// Determine if reader is open or closed bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "FFmpegReader"; }; + std::string Name() { return "FFmpegReader"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open File - which is called by the constructor automatically void Open(); diff -Nru libopenshot-0.2.2+dfsg1/include/FFmpegUtilities.h libopenshot-0.2.5+dfsg1/include/FFmpegUtilities.h --- libopenshot-0.2.2+dfsg1/include/FFmpegUtilities.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/FFmpegUtilities.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for FFmpegUtilities * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -37,11 +40,18 @@ #ifndef IS_FFMPEG_3_2 #define IS_FFMPEG_3_2 (LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 64, 101)) #endif + + #ifndef HAVE_HW_ACCEL + #define HAVE_HW_ACCEL (LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 107, 100)) + #endif // Include the FFmpeg headers extern "C" { #include #include + #if (LIBAVFORMAT_VERSION_MAJOR >= 57) + #include //PM + #endif #include // Change this to the first version swrescale works #if (LIBAVFORMAT_VERSION_MAJOR >= 57) @@ -85,12 +95,11 @@ #endif // This wraps an unsafe C macro to be C++ compatible function - static const std::string av_make_error_string(int errnum) + inline static const std::string av_make_error_string(int errnum) { char errbuf[AV_ERROR_MAX_STRING_SIZE]; av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE); - std::string errstring(errbuf); - return errstring; + return (std::string)errbuf; } // Redefine the C macro to use our new C++ function @@ -100,13 +109,13 @@ // Define this for compatibility #ifndef PixelFormat #define PixelFormat AVPixelFormat - #endif + #endif #ifndef PIX_FMT_RGBA #define PIX_FMT_RGBA AV_PIX_FMT_RGBA - #endif + #endif #ifndef PIX_FMT_NONE #define PIX_FMT_NONE AV_PIX_FMT_NONE - #endif + #endif #ifndef PIX_FMT_RGB24 #define PIX_FMT_RGB24 AV_PIX_FMT_RGB24 #endif @@ -114,6 +123,13 @@ #define PIX_FMT_YUV420P AV_PIX_FMT_YUV420P #endif + // FFmpeg's libavutil/common.h defines an RSHIFT incompatible with Ruby's + // definition in ruby/config.h, so we move it to FF_RSHIFT + #ifdef RSHIFT + #define FF_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT + #endif + #ifdef USE_SW #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ swr_convert(ctx, out, out_count, (const uint8_t **)in, in_count) @@ -137,11 +153,12 @@ #define AV_REGISTER_ALL #define AVCODEC_REGISTER_ALL #define AV_FILENAME url + #define 
AV_SET_FILENAME(oc, f) oc->AV_FILENAME = av_strdup(f) #define MY_INPUT_BUFFER_PADDING_SIZE AV_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) - #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) + #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) #define AV_FREE_CONTEXT(av_context) avcodec_free_context(&av_context) #define AV_GET_CODEC_TYPE(av_stream) av_stream->codecpar->codec_type @@ -172,11 +189,12 @@ #define AV_REGISTER_ALL av_register_all(); #define AVCODEC_REGISTER_ALL avcodec_register_all(); #define AV_FILENAME filename + #define AV_SET_FILENAME(oc, f) snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", f) #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) - #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) + #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) #define AV_FREE_CONTEXT(av_context) avcodec_free_context(&av_context) #define AV_GET_CODEC_TYPE(av_stream) av_stream->codecpar->codec_type @@ -199,19 +217,23 @@ #define AV_FORMAT_NEW_STREAM(oc, st_codec, av_codec, av_st) av_st = avformat_new_stream(oc, NULL);\ if (!av_st) \ throw OutOfMemory("Could not allocate memory for the video stream.", path); \ - c = avcodec_alloc_context3(av_codec); \ - st_codec = c; \ - av_st->codecpar->codec_id = av_codec->id; + _Pragma ("GCC diagnostic push"); \ + _Pragma ("GCC diagnostic ignored \"-Wdeprecated-declarations\""); \ + avcodec_get_context_defaults3(av_st->codec, av_codec); \ + c = av_st->codec; \ + _Pragma ("GCC diagnostic pop"); \ + st_codec = c; #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) avcodec_parameters_from_context(av_stream->codecpar, av_codec); #elif LIBAVFORMAT_VERSION_MAJOR >= 55 #define AV_REGISTER_ALL av_register_all(); #define AVCODEC_REGISTER_ALL avcodec_register_all(); #define AV_FILENAME filename + #define AV_SET_FILENAME(oc, f) snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", f) #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) - #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) + #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) #define AV_FREE_CONTEXT(av_context) avcodec_close(av_context) #define AV_GET_CODEC_TYPE(av_stream) av_stream->codec->codec_type @@ -237,6 +259,7 @@ #define AV_REGISTER_ALL av_register_all(); #define AVCODEC_REGISTER_ALL avcodec_register_all(); #define AV_FILENAME filename + #define AV_SET_FILENAME(oc, f) snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", f) #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() avcodec_alloc_frame() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) 
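FFmpegUtilities.h above hides version differences behind macros, adds a HAVE_HW_ACCEL gate, and provides a C++-friendly av_make_error_string(). A sketch of calling code leaning on those pieces; avformat_open_input()/avformat_close_input() are ordinary FFmpeg API calls, not part of this header, and the include path is an assumption.

#include <iostream>

#include "FFmpegUtilities.h"   // assumed include path; pulls in the FFmpeg headers

void probe(const char* path) {
    // Expands to av_register_all() on older FFmpeg, to nothing on newer releases
    AV_REGISTER_ALL

    AVFormatContext* ctx = nullptr;
    int ret = avformat_open_input(&ctx, path, nullptr, nullptr);
    if (ret < 0) {
        // One-argument C++ wrapper from the header, replacing FFmpeg's av_err2str()
        std::cerr << "open failed: " << av_make_error_string(ret) << "\n";
        return;
    }

#if HAVE_HW_ACCEL
    // Compiled only against FFmpeg versions new enough for the hw-device API
    std::cout << "hardware decode support compiled in\n";
#endif

    avformat_close_input(&ctx);
}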
diff -Nru libopenshot-0.2.2+dfsg1/include/FFmpegWriter.h libopenshot-0.2.5+dfsg1/include/FFmpegWriter.h --- libopenshot-0.2.2+dfsg1/include/FFmpegWriter.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/FFmpegWriter.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for FFmpegWriter class * @author Jonathan Thomas , Fabrice Bellard * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2013 OpenShot Studios, LLC, Fabrice Bellard + * Copyright (c) 2008-2019 OpenShot Studios, LLC, Fabrice Bellard * (http://www.openshotstudios.com). This file is part of * OpenShot Library (http://www.openshot.org), an open-source project * dedicated to delivering high quality video editing and animation solutions @@ -51,18 +54,15 @@ #include "Exceptions.h" #include "OpenMPUtilities.h" #include "ZmqLogger.h" +#include "Settings.h" -using namespace std; - -namespace openshot -{ +namespace openshot { /// This enumeration designates the type of stream when encoding (video or audio) - enum StreamType - { - VIDEO_STREAM, ///< A video stream (used to determine which type of stream) - AUDIO_STREAM ///< An audio stream (used to determine which type of stream) + enum StreamType { + VIDEO_STREAM, ///< A video stream (used to determine which type of stream) + AUDIO_STREAM ///< An audio stream (used to determine which type of stream) }; /** @@ -75,15 +75,19 @@ * @code SIMPLE EXAMPLE * * // Create a reader for a video - * FFmpegReader r("MyAwesomeVideo.webm"); - * r.Open(); // Open thetarget_ reader + * openshot::FFmpegReader r("MyAwesomeVideo.webm"); + * r.Open(); // Open the target reader * * // Create a writer (which will create a WebM video) - * FFmpegWriter w("/home/jonathan/NewVideo.webm"); + * openshot::FFmpegWriter w("/home/jonathan/NewVideo.webm"); * * // Set options - * w.SetAudioOptions(true, "libvorbis", 44100, 2, 128000); // Sample Rate: 44100, Channels: 2, Bitrate: 128000 - * w.SetVideoOptions(true, "libvpx", openshot::Fraction(24,1), 720, 480, openshot::Fraction(1,1), false, false, 300000); // FPS: 24, Size: 720x480, Pixel Ratio: 1/1, Bitrate: 300000 + * + * // Sample Rate: 44100, Channels: 2, Bitrate: 128000 + * w.SetAudioOptions(true, "libvorbis", 44100, 2, openshot::ChannelLayout::LAYOUT_STEREO, 128000); + * + * // FPS: 24, Size: 720x480, Pixel Ratio: 1/1, Bitrate: 300000 + * w.SetVideoOptions(true, "libvpx", openshot::Fraction(24,1), 720, 480, openshot::Fraction(1,1), false, false, 300000); * * // Open the writer * w.Open(); @@ -102,15 +106,19 @@ * @code ADVANCED WRITER EXAMPLE * * // Create a reader for a video - * FFmpegReader r("MyAwesomeVideo.webm"); + * openshot::FFmpegReader r("MyAwesomeVideo.webm"); * r.Open(); // Open the reader * * // Create a writer (which will create a WebM video) - * FFmpegWriter w("/home/jonathan/NewVideo.webm"); + * openshot::FFmpegWriter w("/home/jonathan/NewVideo.webm"); * * // Set options - * w.SetAudioOptions(true, "libvorbis", 44100, 2, 128000); // Sample Rate: 44100, Channels: 2, Bitrate: 128000 - * w.SetVideoOptions(true, "libvpx", openshot::Fraction(24,1), 720, 480, openshot::Fraction(1,1), false, false, 300000); // FPS: 24, Size: 720x480, Pixel Ratio: 1/1, Bitrate: 300000 + * + * // Sample Rate: 44100, Channels: 2, Bitrate: 128000 + * w.SetAudioOptions(true, "libvorbis", 44100, 2, openshot::ChannelLayout::LAYOUT_STEREO, 128000); + * + * // FPS: 24, Size: 720x480, Pixel Ratio: 1/1, Bitrate: 300000 + * w.SetVideoOptions(true, "libvpx", openshot::Fraction(24,1), 720, 480, 
openshot::Fraction(1,1), false, false, 300000); * * // Prepare Streams (Optional method that must be called before any SetOption calls) * w.PrepareStreams(); @@ -140,10 +148,9 @@ * r.Close(); * @endcode */ - class FFmpegWriter : public WriterBase - { + class FFmpegWriter : public WriterBase { private: - string path; + std::string path; int cache_size; bool is_writing; bool is_open; @@ -154,56 +161,56 @@ bool write_header; bool write_trailer; - AVOutputFormat *fmt; - AVFormatContext *oc; - AVStream *audio_st, *video_st; - AVCodecContext *video_codec; - AVCodecContext *audio_codec; - SwsContext *img_convert_ctx; - double audio_pts, video_pts; - int16_t *samples; - uint8_t *audio_outbuf; - uint8_t *audio_encoder_buffer; + AVOutputFormat *fmt; + AVFormatContext *oc; + AVStream *audio_st, *video_st; + AVCodecContext *video_codec; + AVCodecContext *audio_codec; + SwsContext *img_convert_ctx; + double audio_pts, video_pts; + int16_t *samples; + uint8_t *audio_outbuf; + uint8_t *audio_encoder_buffer; - int num_of_rescalers; + int num_of_rescalers; int rescaler_position; - vector image_rescalers; + std::vector image_rescalers; - int audio_outbuf_size; - int audio_input_frame_size; - int initial_audio_input_frame_size; - int audio_input_position; - int audio_encoder_buffer_size; - SWRCONTEXT *avr; - SWRCONTEXT *avr_planar; - - /* Resample options */ - int original_sample_rate; - int original_channels; - - std::shared_ptr last_frame; - deque > spooled_audio_frames; - deque > spooled_video_frames; - - deque > queued_audio_frames; - deque > queued_video_frames; + int audio_outbuf_size; + int audio_input_frame_size; + int initial_audio_input_frame_size; + int audio_input_position; + int audio_encoder_buffer_size; + SWRCONTEXT *avr; + SWRCONTEXT *avr_planar; + + /* Resample options */ + int original_sample_rate; + int original_channels; + + std::shared_ptr last_frame; + std::deque > spooled_audio_frames; + std::deque > spooled_video_frames; + + std::deque > queued_audio_frames; + std::deque > queued_video_frames; - deque > processed_frames; - deque > deallocate_frames; + std::deque > processed_frames; + std::deque > deallocate_frames; - map, AVFrame*> av_frames; + std::map, AVFrame *> av_frames; - /// Add an AVFrame to the cache - void add_avframe(std::shared_ptr frame, AVFrame* av_frame); + /// Add an AVFrame to the cache + void add_avframe(std::shared_ptr frame, AVFrame *av_frame); /// Add an audio output stream - AVStream* add_audio_stream(); + AVStream *add_audio_stream(); /// Add a video output stream - AVStream* add_video_stream(); + AVStream *add_video_stream(); /// Allocate an AVFrame object - AVFrame* allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, uint8_t *new_buffer); + AVFrame *allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, uint8_t *new_buffer); /// Auto detect format (from path) void auto_detect_format(); @@ -232,13 +239,13 @@ void open_video(AVFormatContext *oc, AVStream *st); /// process video frame - void process_video_packet(std::shared_ptr frame); + void process_video_packet(std::shared_ptr frame); /// write all queued frames' audio to the video file - void write_audio_packets(bool final); + void write_audio_packets(bool is_final); /// write video frame - bool write_video_packet(std::shared_ptr frame, AVFrame* frame_final); + bool write_video_packet(std::shared_ptr frame, AVFrame *frame_final); /// write all queued frames void write_queued_frames(); @@ -247,7 +254,7 @@ /// @brief Constructor for FFmpegWriter. 
Throws one of the following exceptions. /// @param path The file path of the video file you want to open and read - FFmpegWriter(string path); + FFmpegWriter(std::string path); /// Close the writer void Close(); @@ -259,7 +266,7 @@ bool IsOpen() { return is_open; }; /// Determine if codec name is valid - static bool IsValidCodec(string codec_name); + static bool IsValidCodec(std::string codec_name); /// Open writer void Open(); @@ -286,7 +293,20 @@ /// @param channels The number of audio channels needed in this file /// @param channel_layout The 'layout' of audio channels (i.e. mono, stereo, surround, etc...) /// @param bit_rate The audio bit rate used during encoding - void SetAudioOptions(bool has_audio, string codec, int sample_rate, int channels, ChannelLayout channel_layout, int bit_rate); + /// + /// \note This is an overloaded function. + void SetAudioOptions(bool has_audio, std::string codec, int sample_rate, int channels, openshot::ChannelLayout channel_layout, int bit_rate); + + /// @brief Set audio export options. + /// + /// Enables the stream and configures a default 2-channel stereo layout. + /// + /// @param codec The codec used to encode the audio for this file + /// @param sample_rate The number of audio samples needed in this file + /// @param bit_rate The audio bit rate used during encoding + /// + /// \note This is an overloaded function. + void SetAudioOptions(std::string codec, int sample_rate, int bit_rate); /// @brief Set the cache size /// @param new_size The number of frames to queue before writing to the file @@ -302,14 +322,31 @@ /// @param interlaced Does this video need to be interlaced? /// @param top_field_first Which frame should be used as the top field? /// @param bit_rate The video bit rate used during encoding - void SetVideoOptions(bool has_video, string codec, Fraction fps, int width, int height,Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate); + /// + /// \note This is an overloaded function. + void SetVideoOptions(bool has_video, std::string codec, openshot::Fraction fps, int width, int height, openshot::Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate); + + /// @brief Set video export options. + /// + /// Enables the stream and configures non-interlaced video with a 1:1 pixel aspect ratio. + /// + /// @param codec The codec used to encode the images in this video + /// @param width The width in pixels of this video + /// @param height The height in pixels of this video + /// @param fps The number of frames per second + /// @param bit_rate The video bit rate used during encoding + /// + /// \note This is an overloaded function. + /// \warning Observe the argument order, which is consistent with the openshot::Timeline constructor, but differs from the other signature. + void SetVideoOptions(std::string codec, int width, int height, openshot::Fraction fps, int bit_rate); /// @brief Set custom options (some codecs accept additional params). This must be called after the /// PrepareStreams() method, otherwise the streams have not been initialized yet. + /// /// @param stream The stream (openshot::StreamType) this option should apply to /// @param name The name of the option you want to set (i.e. qmin, qmax, etc...) /// @param value The new value of this option - void SetOption(StreamType stream, string name, string value); + void SetOption(openshot::StreamType stream, std::string name, std::string value); /// @brief Write the file header (after the options are set). 
This method is called automatically /// by the Open() method if this method has not yet been called. @@ -317,13 +354,17 @@ /// @brief Add a frame to the stack waiting to be encoded. /// @param frame The openshot::Frame object to write to this image - void WriteFrame(std::shared_ptr frame); + /// + /// \note This is an overloaded function. + void WriteFrame(std::shared_ptr frame); /// @brief Write a block of frames from a reader /// @param reader A openshot::ReaderBase object which will provide frames to be written /// @param start The starting frame number of the reader /// @param length The number of frames to write - void WriteFrame(ReaderBase* reader, int64_t start, int64_t length); + /// + /// \note This is an overloaded function. + void WriteFrame(openshot::ReaderBase *reader, int64_t start, int64_t length); /// @brief Write the file trailer (after all frames are written). This is called automatically /// by the Close() method if this method has not yet been called. diff -Nru libopenshot-0.2.2+dfsg1/include/Fraction.h libopenshot-0.2.5+dfsg1/include/Fraction.h --- libopenshot-0.2.2+dfsg1/include/Fraction.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Fraction.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Fraction class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/Frame.h libopenshot-0.2.5+dfsg1/include/Frame.h --- libopenshot-0.2.2+dfsg1/include/Frame.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Frame.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Frame class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,15 +31,6 @@ #ifndef OPENSHOT_FRAME_H #define OPENSHOT_FRAME_H -/// Do not include the juce unittest headers, because it collides with unittest++ -#ifndef __JUCE_UNITTEST_JUCEHEADER__ - #define __JUCE_UNITTEST_JUCEHEADER__ -#endif -#ifndef _NDEBUG - // Define NO debug for JUCE on mac os - #define _NDEBUG -#endif - #include #include #include @@ -53,17 +47,14 @@ #include #include #include "ZmqLogger.h" -#ifdef USE_IMAGEMAGICK - #include "Magick++.h" -#endif -#include "JuceLibraryCode/JuceHeader.h" #include "ChannelLayouts.h" #include "AudioBufferSource.h" #include "AudioResampler.h" #include "Fraction.h" - -#pragma SWIG nowarn=362 -using namespace std; +#include "JuceHeader.h" +#ifdef USE_IMAGEMAGICK + #include "MagickUtilities.h" +#endif namespace openshot { @@ -119,16 +110,16 @@ std::shared_ptr wave_image; std::shared_ptr audio; std::shared_ptr previewApp; - CriticalSection addingImageSection; - CriticalSection addingAudioSection; + juce::CriticalSection addingImageSection; + juce::CriticalSection addingAudioSection; const unsigned char *qbuffer; - Fraction pixel_ratio; + openshot::Fraction pixel_ratio; int channels; ChannelLayout channel_layout; int width; int height; int sample_rate; - string color; + std::string color; int64_t max_audio_sample; ///< The max audio sample count added to this frame /// Constrain a color value from 0 to 255 @@ -144,13 +135,13 @@ Frame(); /// Constructor - image only (48kHz audio silence) - Frame(int64_t number, int width, int height, string color); + Frame(int64_t number, int width, int height, std::string color); /// Constructor - audio only (300x200 blank image) Frame(int64_t number, int samples, int channels); /// Constructor - image & audio - Frame(int64_t number, int width, int height, string color, int samples, int channels); + Frame(int64_t number, int width, int height, std::string color, int samples, int channels); /// Copy constructor Frame ( const Frame &other ); @@ -159,10 +150,10 @@ Frame& operator= (const Frame& other); /// Destructor - ~Frame(); + virtual ~Frame(); /// Add (or replace) pixel data to the frame (based on a solid color) - void AddColor(int new_width, int new_height, string new_color); + void AddColor(int new_width, int new_height, std::string new_color); /// Add (or replace) pixel data to the frame void AddImage(int new_width, int new_height, int bytes_per_pixel, QImage::Format type, const unsigned char *pixels_); @@ -189,15 +180,15 @@ /// Channel Layout of audio samples. A frame needs to keep track of this, since Writers do not always /// know the original channel layout of a frame's audio samples (i.e. mono, stereo, 5 point surround, etc...) - ChannelLayout ChannelsLayout(); + openshot::ChannelLayout ChannelsLayout(); // Set the channel layout of audio samples (i.e. mono, stereo, 5 point surround, etc...) 
- void ChannelsLayout(ChannelLayout new_channel_layout) { channel_layout = new_channel_layout; }; + void ChannelsLayout(openshot::ChannelLayout new_channel_layout) { channel_layout = new_channel_layout; }; /// Clean up buffer after QImage is deleted static void cleanUpBuffer(void *info); - /// Clear the waveform image (and deallocate it's memory) + /// Clear the waveform image (and deallocate its memory) void ClearWaveform(); /// Copy data and pointers from another Frame instance @@ -216,10 +207,10 @@ float* GetAudioSamples(int channel); /// Get an array of sample data (all channels interleaved together), using any sample rate - float* GetInterleavedAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count); + float* GetInterleavedAudioSamples(int new_sample_rate, openshot::AudioResampler* resampler, int* sample_count); // Get a planar array of sample data, using any sample rate - float* GetPlanarAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count); + float* GetPlanarAudioSamples(int new_sample_rate, openshot::AudioResampler* resampler, int* sample_count); /// Get number of audio channels int GetAudioChannelsCount(); @@ -241,7 +232,7 @@ #endif /// Set Pixel Aspect Ratio - Fraction GetPixelRatio() { return pixel_ratio; }; + openshot::Fraction GetPixelRatio() { return pixel_ratio; }; /// Get pixel data (as packets) const unsigned char* GetPixels(); @@ -249,14 +240,17 @@ /// Get pixel data (for only a single scan-line) const unsigned char* GetPixels(int row); + /// Check a specific pixel color value (returns True/False) + bool CheckPixel(int row, int col, int red, int green, int blue, int alpha, int threshold); + /// Get height of image int GetHeight(); /// Calculate the # of samples per video frame (for the current frame number) - int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels); + int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels); /// Calculate the # of samples per video frame (for a specific frame number and frame rate) - static int GetSamplesPerFrame(int64_t frame_number, Fraction fps, int sample_rate, int channels); + static int GetSamplesPerFrame(int64_t frame_number, openshot::Fraction fps, int sample_rate, int channels); /// Get an audio waveform image std::shared_ptr GetWaveform(int width, int height, int Red, int Green, int Blue, int Alpha); @@ -268,7 +262,7 @@ int GetWidth(); /// Resize audio container to hold more (or less) samples and channels - void ResizeAudio(int channels, int length, int sample_rate, ChannelLayout channel_layout); + void ResizeAudio(int channels, int length, int sample_rate, openshot::ChannelLayout channel_layout); /// Get the original sample rate of this frame's audio data int SampleRate(); @@ -277,7 +271,7 @@ void SampleRate(int orig_sample_rate) { sample_rate = orig_sample_rate; }; /// Save the frame image to the specified path. The image format can be BMP, JPG, JPEG, PNG, PPM, XBM, XPM - void Save(string path, float scale, string format="PNG", int quality=100); + void Save(std::string path, float scale, std::string format="PNG", int quality=100); /// Set frame number void SetFrameNumber(int64_t number); @@ -287,8 +281,8 @@ /// Thumbnail the frame image with tons of options to the specified path. The image format is determined from the extension (i.e. image.PNG, image.JPEG). 
/// This method allows for masks, overlays, background color, and much more accurate resizing (including padding and centering) - void Thumbnail(string path, int new_width, int new_height, string mask_path, string overlay_path, - string background_color, bool ignore_aspect, string format="png", int quality=100, float rotate=0.0); + void Thumbnail(std::string path, int new_width, int new_height, std::string mask_path, std::string overlay_path, + std::string background_color, bool ignore_aspect, std::string format="png", int quality=100, float rotate=0.0); /// Play audio samples for this frame void Play(); diff -Nru libopenshot-0.2.2+dfsg1/include/FrameMapper.h libopenshot-0.2.5+dfsg1/include/FrameMapper.h --- libopenshot-0.2.2+dfsg1/include/FrameMapper.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/FrameMapper.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for the FrameMapper class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -46,9 +49,6 @@ #include "OpenMPUtilities.h" - -using namespace std; - namespace openshot { /** @@ -163,14 +163,14 @@ public: // Init some containers - vector fields; // List of all fields - vector frames; // List of all frames + std::vector fields; // List of all fields + std::vector frames; // List of all frames /// Default constructor for openshot::FrameMapper class FrameMapper(ReaderBase *reader, Fraction target_fps, PulldownType target_pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout); /// Destructor - ~FrameMapper(); + virtual ~FrameMapper(); /// Change frame rate or audio mapping details void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout); @@ -196,13 +196,13 @@ bool IsOpen(); /// Return the type name of the class - string Name() { return "FrameMapper"; }; + std::string Name() { return "FrameMapper"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open the internal reader void Open(); @@ -213,6 +213,9 @@ /// Get the current reader ReaderBase* Reader(); + /// Set the current reader + void Reader(ReaderBase *new_reader) { reader = new_reader; } + /// Resample audio and map channels (if needed) void ResampleMappedAudio(std::shared_ptr frame, int64_t original_frame_number); diff -Nru libopenshot-0.2.2+dfsg1/include/ImageReader.h libopenshot-0.2.5+dfsg1/include/ImageReader.h --- libopenshot-0.2.2+dfsg1/include/ImageReader.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ImageReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for 
ImageReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,6 +31,9 @@ #ifndef OPENSHOT_IMAGE_READER_H #define OPENSHOT_IMAGE_READER_H +// Require ImageMagick support +#ifdef USE_IMAGEMAGICK + #include "ReaderBase.h" #include @@ -36,11 +42,9 @@ #include #include #include -#include "Magick++.h" #include "CacheMemory.h" #include "Exceptions.h" - -using namespace std; +#include "MagickUtilities.h" namespace openshot { @@ -67,7 +71,7 @@ class ImageReader : public ReaderBase { private: - string path; + std::string path; std::shared_ptr image; bool is_open; @@ -75,12 +79,12 @@ /// Constructor for ImageReader. This automatically opens the media file and loads /// frame 1, or it throws one of the following exceptions. - ImageReader(string path); + ImageReader(std::string path); - /// Constructor for ImageReader. This only opens the media file to inspect it's properties + /// Constructor for ImageReader. This only opens the media file to inspect its properties /// if inspect_reader=true. When not inspecting the media file, it's much faster, and useful /// when you are inflating the object using JSON after instantiating it. - ImageReader(string path, bool inspect_reader); + ImageReader(std::string path, bool inspect_reader); /// Close File void Close(); @@ -99,13 +103,13 @@ bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "ImageReader"; }; + std::string Name() { return "ImageReader"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open File - which is called by the constructor automatically void Open(); @@ -113,4 +117,5 @@ } -#endif +#endif //USE_IMAGEMAGICK +#endif //OPENSHOT_IMAGE_READER_H diff -Nru libopenshot-0.2.2+dfsg1/include/ImageWriter.h libopenshot-0.2.5+dfsg1/include/ImageWriter.h --- libopenshot-0.2.2+dfsg1/include/ImageWriter.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ImageWriter.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ImageWriter class * @author Jonathan Thomas , Fabrice Bellard * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2013 OpenShot Studios, LLC, Fabrice Bellard + * Copyright (c) 2008-2019 OpenShot Studios, LLC, Fabrice Bellard * (http://www.openshotstudios.com). This file is part of * OpenShot Library (http://www.openshot.org), an open-source project * dedicated to delivering high quality video editing and animation solutions @@ -32,10 +35,11 @@ * along with OpenShot Library. If not, see . 
*/ - #ifndef OPENSHOT_IMAGE_WRITER_H #define OPENSHOT_IMAGE_WRITER_H +#ifdef USE_IMAGEMAGICK + #include "ReaderBase.h" #include "WriterBase.h" @@ -44,13 +48,10 @@ #include #include #include -#include "Magick++.h" #include "CacheMemory.h" #include "Exceptions.h" #include "OpenMPUtilities.h" - - -using namespace std; +#include "MagickUtilities.h" namespace openshot { @@ -85,12 +86,12 @@ class ImageWriter : public WriterBase { private: - string path; + std::string path; int cache_size; bool is_writing; bool is_open; int64_t write_video_count; - vector frames; + std::vector frames; int image_quality; int number_of_loops; bool combine_frames; @@ -101,7 +102,7 @@ /// @brief Constructor for ImageWriter. Throws one of the following exceptions. /// @param path The path of the file you want to create - ImageWriter(string path); + ImageWriter(std::string path); /// @brief Close the writer and encode/output final image to the disk. This is a requirement of ImageMagick, /// which writes all frames of a multi-frame image at one time. @@ -127,8 +128,8 @@ /// @param height Height in pixels of image /// @param quality Quality of image (0 to 100, 70 is default) /// @param loops Number of times to repeat the image (used on certain multi-frame image formats, such as GIF) - /// @param combine Combine frames into a single image (if possible), or save each frame as it's own image - void SetVideoOptions(string format, Fraction fps, int width, int height, + /// @param combine Combine frames into a single image (if possible), or save each frame as its own image + void SetVideoOptions(std::string format, Fraction fps, int width, int height, int quality, int loops, bool combine); /// @brief Add a frame to the stack waiting to be encoded. @@ -145,4 +146,5 @@ } -#endif +#endif //USE_IMAGEMAGICK +#endif //OPENSHOT_IMAGE_WRITER_H diff -Nru libopenshot-0.2.2+dfsg1/include/Json.h libopenshot-0.2.5+dfsg1/include/Json.h --- libopenshot-0.2.2+dfsg1/include/Json.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Json.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for JSON class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,6 +31,12 @@ #ifndef OPENSHOT_JSON_H #define OPENSHOT_JSON_H +#include #include "json/json.h" +#include "Exceptions.h" + +namespace openshot { + const Json::Value stringToJson(const std::string value); +} #endif diff -Nru libopenshot-0.2.2+dfsg1/include/KeyFrame.h libopenshot-0.2.5+dfsg1/include/KeyFrame.h --- libopenshot-0.2.2+dfsg1/include/KeyFrame.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/KeyFrame.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for the Keyframe class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -39,9 +42,6 @@ #include "Point.h" #include "Json.h" -using namespace std; -using namespace openshot; - namespace openshot { /** @@ -63,39 +63,14 @@ */ class Keyframe { private: - bool needs_update; - double FactorialLookup[4]; - - /* - * Because points can be added in any order, we need to reorder them - * in ascending order based on the point.co.X value. This simplifies - * processing the curve, due to all the points going from left to right. - */ - void ReorderPoints(); - - // Process an individual segment - void ProcessSegment(int Segment, Point p1, Point p2); - - // create lookup table for fast factorial calculation - void CreateFactorialTable(); - - // Get a factorial for a coordinate - double Factorial(int64_t n); - - // Calculate the factorial function for Bernstein basis - double Ni(int64_t n, int64_t i); - - // Calculate Bernstein Basis - double Bernstein(int64_t n, int64_t i, double t); + std::vector Points; ///< Vector of all Points public: - vector Points; ///< Vector of all Points - vector Values; ///< Vector of all Values (i.e. the processed coordinates from the curve) /// Default constructor for the Keyframe class - Keyframe(); + Keyframe() = default; - /// Constructor which sets the default point & coordinate at X=0 + /// Constructor which sets the default point & coordinate at X=1 Keyframe(double value); /// Add a new point on the key-frame. Each point has a primary coordinate, a left handle, and a right handle. @@ -108,67 +83,59 @@ void AddPoint(double x, double y, InterpolationType interpolate); /// Does this keyframe contain a specific point - bool Contains(Point p); + bool Contains(Point p) const; /// Flip all the points in this openshot::Keyframe (useful for reversing an effect or transition, etc...) void FlipPoints(); /// Get the index of a point by matching a coordinate - int64_t FindIndex(Point p); + int64_t FindIndex(Point p) const; /// Get the value at a specific index - double GetValue(int64_t index); + double GetValue(int64_t index) const; /// Get the rounded INT value at a specific index - int GetInt(int64_t index); + int GetInt(int64_t index) const; /// Get the rounded LONG value at a specific index - int64_t GetLong(int64_t index); + int64_t GetLong(int64_t index) const; /// Get the fraction that represents how many times this value is repeated in the curve - Fraction GetRepeatFraction(int64_t index); + Fraction GetRepeatFraction(int64_t index) const; /// Get the change in Y value (from the previous Y value) - double GetDelta(int64_t index); + double GetDelta(int64_t index) const; /// Get a point at a specific index - Point& GetPoint(int64_t index); + Point const & GetPoint(int64_t index) const; /// Get current point (or closest point to the right) from the X coordinate (i.e. the frame number) - Point GetClosestPoint(Point p); + Point GetClosestPoint(Point p) const; /// Get current point (or closest point) from the X coordinate (i.e. the frame number) /// Either use the closest left point, or right point - Point GetClosestPoint(Point p, bool useLeft); + Point GetClosestPoint(Point p, bool useLeft) const; /// Get previous point ( - Point GetPreviousPoint(Point p); + Point GetPreviousPoint(Point p) const; /// Get max point (by Y coordinate) - Point GetMaxPoint(); + Point GetMaxPoint() const; // Get the number of values (i.e. 
coordinates on the X axis) - int64_t GetLength(); + int64_t GetLength() const; /// Get the number of points (i.e. # of points) - int64_t GetCount(); + int64_t GetCount() const; /// Get the direction of the curve at a specific index (increasing or decreasing) - bool IsIncreasing(int index); + bool IsIncreasing(int index) const; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJson(string value); ///< Load JSON string into this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object - - /** - * @brief Calculate all of the values for this keyframe. - * - * This clears any existing data in the "values" vector. This method is automatically called - * by AddPoint(), so you don't typically need to call this method. - */ - void Process(); + std::string Json() const; ///< Generate JSON string of this object + Json::Value JsonValue() const; ///< Generate Json::Value for this object + void SetJson(const std::string value); ///< Load JSON string into this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Remove a point by matching a coordinate void RemovePoint(Point p); @@ -184,10 +151,10 @@ void UpdatePoint(int64_t index, Point p); /// Print a list of points - void PrintPoints(); + void PrintPoints() const; /// Print just the Y value of the point's primary coordinate - void PrintValues(); + void PrintValues() const; }; diff -Nru libopenshot-0.2.2+dfsg1/include/MagickUtilities.h libopenshot-0.2.5+dfsg1/include/MagickUtilities.h --- libopenshot-0.2.2+dfsg1/include/MagickUtilities.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/MagickUtilities.h 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,61 @@ +/** + * @file + * @brief Header file for MagickUtilities (IM6/IM7 compatibility overlay) + * @author Jonathan Thomas + * @author FeRD (Frank Dana) + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#ifndef OPENSHOT_MAGICK_UTILITIES_H +#define OPENSHOT_MAGICK_UTILITIES_H + +#ifdef USE_IMAGEMAGICK + + #include "Magick++.h" + + // Determine ImageMagick version, as IM7 isn't fully + // backwards compatible + #ifndef NEW_MAGICK + #define NEW_MAGICK (MagickLibVersion >= 0x700) + #endif + + // IM7: ->alpha(bool) + // IM6: ->matte(bool) + #if NEW_MAGICK + #define MAGICK_IMAGE_ALPHA(im, a) im->alpha((a)) + #else + #define MAGICK_IMAGE_ALPHA(im, a) im->matte((a)) + #endif + + // IM7: vector + // IM6: list + // (both have the push_back() method which is all we use) + #if NEW_MAGICK + #define MAGICK_DRAWABLE std::vector + #else + #define MAGICK_DRAWABLE std::list + #endif + +#endif +#endif diff -Nru libopenshot-0.2.2+dfsg1/include/OpenMPUtilities.h libopenshot-0.2.5+dfsg1/include/OpenMPUtilities.h --- libopenshot-0.2.2+dfsg1/include/OpenMPUtilities.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/OpenMPUtilities.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for OpenMPUtilities (set some common macros) * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -32,25 +35,11 @@ #include #include -// Calculate the # of OpenMP and FFmpeg Threads to allow. We are limiting both -// of these based on our own performance tests (more is not always better). -#define OPEN_MP_NUM_PROCESSORS (min(omp_get_num_procs(), 6)) -#define FF_NUM_PROCESSORS (min(omp_get_num_procs(), 12)) - -using namespace std; - -namespace openshot { - - // Check if OS2_OMP_THREADS environment variable is present, and return - // if multiple threads should be used with OMP - static bool IsOMPEnabled() { - char* OS2_OMP_THREADS = getenv("OS2_OMP_THREADS"); - if (OS2_OMP_THREADS != NULL && strcmp(OS2_OMP_THREADS, "0") == 0) - return false; - else - return true; - } +#include "Settings.h" + +// Calculate the # of OpenMP Threads to allow +#define OPEN_MP_NUM_PROCESSORS (std::min(omp_get_num_procs(), std::max(2, openshot::Settings::Instance()->OMP_THREADS) )) +#define FF_NUM_PROCESSORS (std::min(omp_get_num_procs(), std::max(2, openshot::Settings::Instance()->FF_THREADS) )) -} #endif diff -Nru libopenshot-0.2.2+dfsg1/include/OpenShot.h libopenshot-0.2.5+dfsg1/include/OpenShot.h --- libopenshot-0.2.2+dfsg1/include/OpenShot.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/OpenShot.h 2020-03-03 08:00:06.000000000 +0000 @@ -64,15 +64,18 @@ * canvas (i.e. pan & scan). * \image html /doc/images/Timeline_Layers.png * - * ### Build Instructions (Linux, Mac, and Windows) ### - * For a step-by-step guide to building / compiling libopenshot, check out the - * Official Installation Guide. + * ### Build Instructions ### + * Build instructions are available for all three major Operating Systems: + * * [Building libopenshot for Windows](doc/INSTALL-WINDOWS.md) + * * [Building libopenshot for MacOS](doc/INSTALL-MAC.md) + * * [Building libopenshot for Linux](doc/INSTALL-LINUX.md) * * ### Want to Learn More? ### * To continue learning about libopenshot, take a look at the full list of classes available. * + * \anchor License * ### License & Copyright ### - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -93,7 +96,7 @@ */ // Include the version number of OpenShot Library -#include "Version.h" +#include "OpenShotVersion.h" // Include all other classes #include "AudioBufferSource.h" @@ -132,7 +135,10 @@ #include "PlayerBase.h" #include "Point.h" #include "Profiles.h" +#include "QtHtmlReader.h" #include "QtImageReader.h" +#include "QtTextReader.h" #include "Timeline.h" +#include "Settings.h" #endif diff -Nru libopenshot-0.2.2+dfsg1/include/OpenShotVersion.h.in libopenshot-0.2.5+dfsg1/include/OpenShotVersion.h.in --- libopenshot-0.2.2+dfsg1/include/OpenShotVersion.h.in 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/OpenShotVersion.h.in 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,70 @@ +/** + * @file + * @brief Header file that includes the version number of libopenshot + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_VERSION_H +#define OPENSHOT_VERSION_H + +#define OPENSHOT_VERSION_ALL "@PROJECT_VERSION@" /// A string of the entire version "Major.Minor.Build" +#define OPENSHOT_VERSION_FULL "@PROJECT_VERSION_FULL@" /// A string of the full version identifier, including suffixes (e.g. "0.0.0-dev0") + +#define OPENSHOT_VERSION_MAJOR_MINOR "@PROJECT_VERSION_MAJOR@.@PROJECT_VERSION_MINOR@" /// A string of the "Major.Minor" version + +#define OPENSHOT_VERSION_MAJOR @PROJECT_VERSION_MAJOR@ /// Major version number is incremented when huge features are added or improved. +#define OPENSHOT_VERSION_MINOR @PROJECT_VERSION_MINOR@ /// Minor version is incremented when smaller (but still very important) improvements are added. +#define OPENSHOT_VERSION_BUILD @PROJECT_VERSION_PATCH@ /// Build number is incremented when minor bug fixes and less important improvements are added. + +#define OPENSHOT_VERSION_SO @PROJECT_SO_VERSION@ /// Shared object version number. This increments any time the API and ABI changes (so old apps will no longer link) + +#include + +namespace openshot +{ + /// This struct holds version number information. Use the GetVersion() method to access the current version of libopenshot. 
+ struct OpenShotVersion { + static const int Major = OPENSHOT_VERSION_MAJOR; /// Major version number + static const int Minor = OPENSHOT_VERSION_MINOR; /// Minor version number + static const int Build = OPENSHOT_VERSION_BUILD; /// Build number + static const int So = OPENSHOT_VERSION_SO; /// Shared Object Number (incremented when API or ABI changes) + + /// Get a string version of the version (i.e. "Major.Minor.Build") + inline static const std::string ToString() { + std::stringstream version_string; + version_string << Major << "." << Minor << "." << Build; + return version_string.str(); + } + }; + + static const openshot::OpenShotVersion Version; + + /// Get the current version number of libopenshot (major, minor, and build number) + openshot::OpenShotVersion GetVersion(); +} + +#endif // OPENSHOT_VERSION_H \ No newline at end of file diff -Nru libopenshot-0.2.2+dfsg1/include/PlayerBase.h libopenshot-0.2.5+dfsg1/include/PlayerBase.h --- libopenshot-0.2.2+dfsg1/include/PlayerBase.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/PlayerBase.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for PlayerBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -31,8 +34,6 @@ #include #include "ReaderBase.h" -using namespace std; - namespace openshot { /** @@ -60,7 +61,7 @@ protected: float speed; float volume; - ReaderBase *reader; + openshot::ReaderBase *reader; PlaybackMode mode; public: @@ -78,7 +79,7 @@ virtual void Pause() = 0; /// Get the current frame number being played - virtual int Position() = 0; + virtual int64_t Position() = 0; /// Seek to a specific frame in the player virtual void Seek(int64_t new_frame) = 0; @@ -93,10 +94,10 @@ virtual void Stop() = 0; /// Get the current reader, such as a FFmpegReader - virtual ReaderBase* Reader() = 0; + virtual openshot::ReaderBase* Reader() = 0; /// Set the current reader, such as a FFmpegReader - virtual void Reader(ReaderBase *new_reader) = 0; + virtual void Reader(openshot::ReaderBase *new_reader) = 0; /// Get the Volume virtual float Volume() = 0; @@ -104,6 +105,7 @@ /// Set the Volume (1.0 = normal volume, <1.0 = quieter, >1.0 louder) virtual void Volume(float new_volume) = 0; + virtual ~PlayerBase() = default; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/Point.h libopenshot-0.2.5+dfsg1/include/Point.h --- libopenshot-0.2.2+dfsg1/include/Point.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Point.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Point class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -32,8 +35,6 @@ #include "Exceptions.h" #include "Json.h" -using namespace std; - namespace openshot { /** @@ -118,10 +119,10 @@ void Initialize_RightHandle(float x, float y); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJson(string value); ///< Load JSON string into this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const; ///< Generate JSON string of this object + Json::Value JsonValue() const; ///< Generate Json::Value for this object + void SetJson(const std::string value); ///< Load JSON string into this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object }; diff -Nru libopenshot-0.2.2+dfsg1/include/Profiles.h libopenshot-0.2.5+dfsg1/include/Profiles.h --- libopenshot-0.2.2+dfsg1/include/Profiles.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Profiles.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Profile class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -42,8 +45,6 @@ #include "Fraction.h" #include "Json.h" -using namespace std; - namespace openshot { @@ -55,7 +56,7 @@ */ struct ProfileInfo { - string description; ///< The description of this profile. + std::string description; ///< The description of this profile. int height; ///< The height of the video (in pixels) int width; ///< The width of the video (in pixels) int pixel_format; ///< The pixel format (i.e. YUV420P, RGB24, etc...) @@ -86,13 +87,13 @@ /// @brief Constructor for Profile. /// @param path The folder path / location of a profile file - Profile(string path); + Profile(std::string path); /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJson(string value); ///< Load JSON string into this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const; ///< Generate JSON string of this object + Json::Value JsonValue() const; ///< Generate Json::Value for this object + void SetJson(const std::string value); ///< Load JSON string into this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object }; } diff -Nru libopenshot-0.2.2+dfsg1/include/Qt/AudioPlaybackThread.h libopenshot-0.2.5+dfsg1/include/Qt/AudioPlaybackThread.h --- libopenshot-0.2.2+dfsg1/include/Qt/AudioPlaybackThread.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Qt/AudioPlaybackThread.h 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -32,19 +35,18 @@ #include "../ReaderBase.h" #include "../RendererBase.h" #include "../AudioReaderSource.h" +#include "../AudioDeviceInfo.h" +#include "../Settings.h" namespace openshot { - using juce::Thread; - using juce::WaitableEvent; - - struct SafeTimeSliceThread : TimeSliceThread + struct SafeTimeSliceThread : juce::TimeSliceThread { - SafeTimeSliceThread(const String & s) : TimeSliceThread(s) {} + SafeTimeSliceThread(const String & s) : juce::TimeSliceThread(s) {} void run() { try { - TimeSliceThread::run(); + juce::TimeSliceThread::run(); } catch (const TooManySeeks & e) { // ... } @@ -57,14 +59,20 @@ class AudioDeviceManagerSingleton { private: /// Default constructor (Don't allow user to create an instance of this singleton) - AudioDeviceManagerSingleton(){}; + AudioDeviceManagerSingleton(){ initialise_error=""; }; /// Private variable to keep track of singleton instance static AudioDeviceManagerSingleton * m_pInstance; public: - /// Create or get an instance of this singleton (invoke the class with this method) - static AudioDeviceManagerSingleton * Instance(int numChannels); + /// Error found during JUCE initialise method + std::string initialise_error; + + /// List of valid audio device names + std::vector audio_device_names; + + /// Override with no channels and no preferred audio device + static AudioDeviceManagerSingleton * Instance(); /// Public device manager property AudioDeviceManager audioDeviceManager; @@ -76,54 +84,66 @@ /** * @brief The audio playback thread */ - class AudioPlaybackThread : Thread + class AudioPlaybackThread : juce::Thread { - AudioSourcePlayer player; - AudioTransportSource transport; - MixerAudioSource mixer; - AudioReaderSource *source; - double sampleRate; - int numChannels; - WaitableEvent play; - WaitableEvent played; - int buffer_size; - bool is_playing; - SafeTimeSliceThread time_thread; - - /// Constructor - AudioPlaybackThread(); - /// Destructor - ~AudioPlaybackThread(); - - /// Set the current thread's reader - void Reader(ReaderBase *reader); - - /// Get the current frame object (which is filling the buffer) - std::shared_ptr getFrame(); - - /// Get the current frame number being played - int64_t getCurrentFramePosition(); - - /// Play the audio - void Play(); - - /// Seek the audio thread - void Seek(int64_t new_position); - - /// Stop the audio playback - void Stop(); - - /// Start thread - void run(); - - /// Set Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) - void setSpeed(int new_speed) { if (source) source->setSpeed(new_speed); } - - /// Get Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) 
- int getSpeed() const { if (source) return source->getSpeed(); else return 1; } + AudioSourcePlayer player; + AudioTransportSource transport; + MixerAudioSource mixer; + AudioReaderSource *source; + double sampleRate; + int numChannels; + juce::WaitableEvent play; + juce::WaitableEvent played; + int buffer_size; + bool is_playing; + SafeTimeSliceThread time_thread; + + /// Constructor + AudioPlaybackThread(); + /// Destructor + ~AudioPlaybackThread(); + + /// Set the current thread's reader + void Reader(openshot::ReaderBase *reader); + + /// Get the current frame object (which is filling the buffer) + std::shared_ptr getFrame(); + + /// Get the current frame number being played + int64_t getCurrentFramePosition(); + + /// Play the audio + void Play(); + + /// Seek the audio thread + void Seek(int64_t new_position); + + /// Stop the audio playback + void Stop(); + + /// Start thread + void run(); + + /// Set Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) + void setSpeed(int new_speed) { if (source) source->setSpeed(new_speed); } + + /// Get Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) + int getSpeed() const { if (source) return source->getSpeed(); else return 1; } + + /// Get Audio Error (if any) + std::string getError() + { + return AudioDeviceManagerSingleton::Instance()->initialise_error; + } + + /// Get Audio Device Names (if any) + std::vector getAudioDeviceNames() + { + return AudioDeviceManagerSingleton::Instance()->audio_device_names; + }; - friend class PlayerPrivate; - friend class QtPlayer; + friend class PlayerPrivate; + friend class QtPlayer; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/Qt/PlayerDemo.h libopenshot-0.2.5+dfsg1/include/Qt/PlayerDemo.h --- libopenshot-0.2.2+dfsg1/include/Qt/PlayerDemo.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Qt/PlayerDemo.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for demo application for QtPlayer class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -43,8 +46,6 @@ class QtPlayer; } -using openshot::QtPlayer; - class PlayerDemo : public QWidget { Q_OBJECT @@ -54,7 +55,7 @@ ~PlayerDemo(); protected: - void keyPressEvent(QKeyEvent *event); + void keyPressEvent(QKeyEvent *event) Q_DECL_OVERRIDE; void closeEvent(QCloseEvent *event) Q_DECL_OVERRIDE; private slots: @@ -64,7 +65,7 @@ QVBoxLayout *vbox; QMenuBar *menu; VideoRenderWidget *video; - QtPlayer *player; + openshot::QtPlayer *player; }; #endif // OPENSHOT_PLAYER_H diff -Nru libopenshot-0.2.2+dfsg1/include/Qt/PlayerPrivate.h libopenshot-0.2.5+dfsg1/include/Qt/PlayerPrivate.h --- libopenshot-0.2.2+dfsg1/include/Qt/PlayerPrivate.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Qt/PlayerPrivate.h 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -38,27 +41,25 @@ namespace openshot { - using juce::Thread; - /** * @brief The private part of QtPlayer class, which contains an audio thread and video thread, * and controls the video timing and audio synchronization code. */ - class PlayerPrivate : Thread + class PlayerPrivate : juce::Thread { - std::shared_ptr frame; /// The current frame + std::shared_ptr frame; /// The current frame int64_t video_position; /// The current frame position. int64_t audio_position; /// The current frame position. - ReaderBase *reader; /// The reader which powers this player - AudioPlaybackThread *audioPlayback; /// The audio thread - VideoPlaybackThread *videoPlayback; /// The video thread - VideoCacheThread *videoCache; /// The cache thread + openshot::ReaderBase *reader; /// The reader which powers this player + openshot::AudioPlaybackThread *audioPlayback; /// The audio thread + openshot::VideoPlaybackThread *videoPlayback; /// The video thread + openshot::VideoCacheThread *videoCache; /// The cache thread int speed; /// The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) - RendererBase *renderer; + openshot::RendererBase *renderer; int64_t last_video_position; /// The last frame actually displayed /// Constructor - PlayerPrivate(RendererBase *rb); + PlayerPrivate(openshot::RendererBase *rb); /// Destructor virtual ~PlayerPrivate(); @@ -72,7 +73,7 @@ void stopPlayback(int timeOutMilliseconds = -1); /// Get the next frame (based on speed and direction) - std::shared_ptr getFrame(); + std::shared_ptr getFrame(); /// The parent class of PlayerPrivate friend class QtPlayer; diff -Nru libopenshot-0.2.2+dfsg1/include/Qt/VideoCacheThread.h libopenshot-0.2.5+dfsg1/include/Qt/VideoCacheThread.h --- libopenshot-0.2.2+dfsg1/include/Qt/VideoCacheThread.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Qt/VideoCacheThread.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for VideoCacheThread class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/Qt/VideoPlaybackThread.h libopenshot-0.2.5+dfsg1/include/Qt/VideoPlaybackThread.h --- libopenshot-0.2.2+dfsg1/include/Qt/VideoPlaybackThread.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Qt/VideoPlaybackThread.h 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/Qt/VideoRenderer.h libopenshot-0.2.5+dfsg1/include/Qt/VideoRenderer.h --- libopenshot-0.2.2+dfsg1/include/Qt/VideoRenderer.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Qt/VideoRenderer.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Video Renderer class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/include/Qt/VideoRenderWidget.h libopenshot-0.2.5+dfsg1/include/Qt/VideoRenderWidget.h --- libopenshot-0.2.2+dfsg1/include/Qt/VideoRenderWidget.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Qt/VideoRenderWidget.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Video RendererWidget class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -57,7 +60,7 @@ QRect centeredViewport(int width, int height); private slots: - void present(const QImage &image); + void present(const QImage &image); }; diff -Nru libopenshot-0.2.2+dfsg1/include/QtHtmlReader.h libopenshot-0.2.5+dfsg1/include/QtHtmlReader.h --- libopenshot-0.2.2+dfsg1/include/QtHtmlReader.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/QtHtmlReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,145 @@ +/** + * @file + * @brief Header file for QtHtmlReader class + * @author Jonathan Thomas + * @author Sergei Kolesov (jediserg) + * @author Jeff Shillitto (jeffski) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#ifndef OPENSHOT_QT_HTML_READER_H +#define OPENSHOT_QT_HTML_READER_H + +#include "ReaderBase.h" + +#include +#include +#include +#include +#include +#include +#include "CacheMemory.h" +#include "Enums.h" +#include "Exceptions.h" + +class QImage; + +namespace openshot +{ + + /** + * @brief This class uses Qt libraries, to create frames with rendered HTML, and return + * openshot::Frame objects. + * + * Supports HTML/CSS subset available via Qt libraries, see: https://doc.qt.io/qt-5/richtext-html-subset.html + * + * @code + * // Any application using this class must instantiate either QGuiApplication or QApplication + * QApplication a(argc, argv); + * + * // Create a reader to generate an openshot::Frame containing text + * QtHtmlReader r(720, // width + * 480, // height + * 5, // x_offset + * 5, // y_offset + * GRAVITY_CENTER, // gravity + * "Check out this Text!", // html + * "b { color: #ff0000 }", // css + * "#000000" // background_color + * ); + * r.Open(); // Open the reader + * + * // Get frame number 1 from the video (in fact, any frame # you request will return the same frame) + * std::shared_ptr f = r.GetFrame(1); + * + * // Now that we have an openshot::Frame object, lets have some fun! + * f->Display(); // Display the frame on the screen + * + * // Close the reader + * r.Close(); + * @endcode + */ + class QtHtmlReader : public ReaderBase + { + private: + int width; + int height; + int x_offset; + int y_offset; + std::string html; + std::string css; + std::string background_color; + std::shared_ptr image; + bool is_open; + openshot::GravityType gravity; + public: + + /// Default constructor (blank text) + QtHtmlReader(); + + /// @brief Constructor for QtHtmlReader with all parameters. + /// @param width The width of the requested openshot::Frame (not the size of the text) + /// @param height The height of the requested openshot::Frame (not the size of the text) + /// @param x_offset The number of pixels to offset the text on the X axis (horizontal) + /// @param y_offset The number of pixels to offset the text on the Y axis (vertical) + /// @param gravity The alignment / gravity of the text + /// @param html The HTML you want to render / display + /// @param css The CSS you want to apply to style the HTML + /// @param background_color The background color of the frame image (valid values are a color string in \#RRGGBB or \#AARRGGBB notation, a CSS color name, or 'transparent') + QtHtmlReader(int width, int height, int x_offset, int y_offset, GravityType gravity, std::string html, std::string css, std::string background_color); + + /// Close Reader + void Close(); + + /// Get the cache object used by this reader (always returns NULL for this object) + openshot::CacheMemory* GetCache() { return NULL; }; + + /// Get an openshot::Frame object for a specific frame number of this reader. All numbers + /// return the same Frame, since they all share the same image data. + /// + /// @returns The requested frame (containing the image) + /// @param requested_frame The frame number that is requested. 
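[Editor's note: the @code sample above loses its template arguments in this rendering; a self-contained version is sketched below. Illustrative only, not part of the patch — it assumes the GetFrame() result holds an openshot::Frame, and the HTML/CSS strings are placeholders.]

    // Illustrative only -- not part of this patch.
    #include <QApplication>
    #include <memory>
    #include "QtHtmlReader.h"
    #include "Frame.h"

    int main(int argc, char* argv[])
    {
        QApplication app(argc, argv);   // QtHtmlReader requires a QGuiApplication or QApplication

        openshot::QtHtmlReader r(720, 480, 5, 5, openshot::GRAVITY_CENTER,
                                 "<b>Check out this Text!</b>",   // html (placeholder)
                                 "b { color: #ff0000 }",          // css (placeholder)
                                 "#000000");                      // background_color
        r.Open();

        // Every frame number returns the same rendered image
        std::shared_ptr<openshot::Frame> f = r.GetFrame(1);
        f->Display();

        r.Close();
        return 0;
    }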
+ std::shared_ptr GetFrame(int64_t requested_frame); + + /// Determine if reader is open or closed + bool IsOpen() { return is_open; }; + + /// Return the type name of the class + std::string Name() { return "QtHtmlReader"; }; + + /// Get and Set JSON methods + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object + + /// Open Reader - which is called by the constructor automatically + void Open(); + }; + +} + +#endif diff -Nru libopenshot-0.2.2+dfsg1/include/QtImageReader.h libopenshot-0.2.5+dfsg1/include/QtImageReader.h --- libopenshot-0.2.2+dfsg1/include/QtImageReader.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/QtImageReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for QtImageReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,21 +31,14 @@ #ifndef OPENSHOT_QIMAGE_READER_H #define OPENSHOT_QIMAGE_READER_H -#include "ReaderBase.h" - #include #include #include #include #include #include -#include -#include -#include -#include "CacheMemory.h" #include "Exceptions.h" - -using namespace std; +#include "ReaderBase.h" namespace openshot { @@ -69,21 +65,24 @@ class QtImageReader : public ReaderBase { private: - string path; - std::shared_ptr image; ///> Original image (full quality) - std::shared_ptr cached_image; ///> Scaled for performance - bool is_open; + QString path; + std::shared_ptr image; ///> Original image (full quality) + std::shared_ptr cached_image; ///> Scaled for performance + bool is_open; ///> Is Reader opened + QSize max_size; ///> Current max_size as calculated with Clip properties public: /// Constructor for QtImageReader. This automatically opens the media file and loads /// frame 1, or it throws one of the following exceptions. - QtImageReader(string path); + QtImageReader(std::string path); - /// Constructor for QtImageReader. This only opens the media file to inspect it's properties + /// Constructor for QtImageReader. This only opens the media file to inspect its properties /// if inspect_reader=true. When not inspecting the media file, it's much faster, and useful /// when you are inflating the object using JSON after instantiating it. 
- QtImageReader(string path, bool inspect_reader); + QtImageReader(std::string path, bool inspect_reader); + + virtual ~QtImageReader(); /// Close File void Close(); @@ -102,16 +101,13 @@ bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "QtImageReader"; }; + std::string Name() { return "QtImageReader"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object - - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height); + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open File - which is called by the constructor automatically void Open(); diff -Nru libopenshot-0.2.2+dfsg1/include/QtPlayer.h libopenshot-0.2.5+dfsg1/include/QtPlayer.h --- libopenshot-0.2.2+dfsg1/include/QtPlayer.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/QtPlayer.h 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -35,23 +38,21 @@ #include "Qt/PlayerPrivate.h" #include "RendererBase.h" -using namespace std; - namespace openshot { /** * @brief This class is used to playback a video from a reader. 
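[Editor's note: a rough usage sketch of the reworked player interface declared below. Illustrative only, not part of the patch — the openshot::RendererBase* is assumed to come from the host application, FFmpegReader is used as an example reader, and the media path is a placeholder.]

    // Illustrative only -- not part of this patch.
    #include <iostream>
    #include "QtPlayer.h"
    #include "FFmpegReader.h"

    void demo_playback(openshot::RendererBase* renderer)
    {
        openshot::FFmpegReader reader("/path/to/video.mp4");   // placeholder path
        reader.Open();

        openshot::QtPlayer player(renderer);
        player.Reader(&reader);          // now takes/returns openshot::ReaderBase*
        player.Play();

        // New in this release: audio-device problems are surfaced instead of failing silently
        if (!player.GetError().empty())
            std::cout << "Audio error: " << player.GetError() << std::endl;
        std::cout << player.GetAudioDeviceNames().size()
                  << " audio device(s) reported by JUCE" << std::endl;

        std::cout << "Current frame: " << player.Position() << std::endl;  // Position() is int64_t now
        player.Stop();
    }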
* */ - class QtPlayer : public PlayerBase + class QtPlayer : public openshot::PlayerBase { - PlayerPrivate *p; + openshot::PlayerPrivate *p; bool threads_started; public: /// Default constructor explicit QtPlayer(); - explicit QtPlayer(RendererBase *rb); + explicit QtPlayer(openshot::RendererBase *rb); /// Default destructor virtual ~QtPlayer(); @@ -59,22 +60,28 @@ /// Close audio device void CloseAudioDevice(); + /// Get Error (if any) + std::string GetError(); + + /// Get Audio Devices from JUCE + std::vector GetAudioDeviceNames(); + /// Play the video void Play(); - + /// Display a loading animation void Loading(); - + /// Get the current mode - PlaybackMode Mode(); + openshot::PlaybackMode Mode(); /// Pause the video void Pause(); - + /// Get the current frame number being played - int Position(); - - /// Seek to a specific frame in the player + int64_t Position(); + + /// Seek to a specific frame in the player void Seek(int64_t new_frame); /// Set the source URL/path of this player (which will create an internal Reader) @@ -98,10 +105,10 @@ void Stop(); /// Set the current reader - void Reader(ReaderBase *new_reader); + void Reader(openshot::ReaderBase *new_reader); /// Get the current reader, such as a FFmpegReader - ReaderBase* Reader(); + openshot::ReaderBase* Reader(); /// Get the Volume float Volume(); @@ -109,7 +116,6 @@ /// Set the Volume (1.0 = normal volume, <1.0 = quieter, >1.0 louder) void Volume(float new_volume); }; - } #endif diff -Nru libopenshot-0.2.2+dfsg1/include/QtTextReader.h libopenshot-0.2.5+dfsg1/include/QtTextReader.h --- libopenshot-0.2.2+dfsg1/include/QtTextReader.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/QtTextReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,156 @@ +/** + * @file + * @brief Header file for QtTextReader class + * @author Jonathan Thomas + * @author Sergei Kolesov (jediserg) + * @author Jeff Shillitto (jeffski) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_QT_TEXT_READER_H +#define OPENSHOT_QT_TEXT_READER_H + +#include "ReaderBase.h" + +#include +#include +#include +#include +#include +#include +#include "CacheMemory.h" +#include "Enums.h" +#include "Exceptions.h" + +class QImage; + +namespace openshot +{ + + /** + * @brief This class uses Qt libraries, to create frames with "Text", and return + * openshot::Frame objects. + * + * All system fonts are supported, including many different font properties, such as size, color, + * alignment, padding, etc... 
+ * + * @code + * // Any application using this class must instantiate either QGuiApplication or QApplication + * QApplication a(argc, argv); + * + * // Create a reader to generate an openshot::Frame containing text + * QtTextReader r(720, // width + * 480, // height + * 5, // x_offset + * 5, // y_offset + * GRAVITY_CENTER, // gravity + * "Check out this Text!", // text + * "Arial", // font + * 15.0, // font size + * "#fff000", // text_color + * "#000000" // background_color + * ); + * r.Open(); // Open the reader + * + * // Get frame number 1 from the video (in fact, any frame # you request will return the same frame) + * std::shared_ptr f = r.GetFrame(1); + * + * // Now that we have an openshot::Frame object, lets have some fun! + * f->Display(); // Display the frame on the screen + * + * // Close the reader + * r.Close(); + * @endcode + */ + class QtTextReader : public ReaderBase + { + private: + int width; + int height; + int x_offset; + int y_offset; + std::string text; + QFont font; + std::string text_color; + std::string background_color; + std::string text_background_color; + std::shared_ptr image; + bool is_open; + openshot::GravityType gravity; + + public: + + /// Default constructor (blank text) + QtTextReader(); + + /// @brief Constructor for QtTextReader with all parameters. + /// @param width The width of the requested openshot::Frame (not the size of the text) + /// @param height The height of the requested openshot::Frame (not the size of the text) + /// @param x_offset The number of pixels to offset the text on the X axis (horizontal) + /// @param y_offset The number of pixels to offset the text on the Y axis (vertical) + /// @param gravity The alignment / gravity of the text + /// @param text The text you want to generate / display + /// @param font The font of the text + /// @param text_color The color of the text (valid values are a color string in \#RRGGBB or \#AARRGGBB notation or a CSS color name) + /// @param background_color The background color of the frame image (valid values are a color string in \#RRGGBB or \#AARRGGBB notation, a CSS color name, or 'transparent') + QtTextReader(int width, int height, int x_offset, int y_offset, GravityType gravity, std::string text, QFont font, std::string text_color, std::string background_color); + + /// Draw a box under rendered text using the specified color. + /// @param color The background color behind the text (valid values are a color string in \#RRGGBB or \#AARRGGBB notation or a CSS color name) + void SetTextBackgroundColor(std::string color); + + /// Close Reader + void Close(); + + /// Get the cache object used by this reader (always returns NULL for this object) + openshot::CacheMemory* GetCache() { return NULL; }; + + /// Get an openshot::Frame object for a specific frame number of this reader. All numbers + /// return the same Frame, since they all share the same image data. + /// + /// @returns The requested frame (containing the image) + /// @param requested_frame The frame number that is requested. 
+ std::shared_ptr GetFrame(int64_t requested_frame); + + /// Determine if reader is open or closed + bool IsOpen() { return is_open; }; + + /// Return the type name of the class + std::string Name() { return "QtTextReader"; }; + + /// Get and Set JSON methods + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object + + /// Open Reader - which is called by the constructor automatically + void Open(); + }; + +} + +#endif diff -Nru libopenshot-0.2.2+dfsg1/include/ReaderBase.h libopenshot-0.2.5+dfsg1/include/ReaderBase.h --- libopenshot-0.2.2+dfsg1/include/ReaderBase.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ReaderBase.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ReaderBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -31,10 +34,11 @@ #include #include #include -#include +#include #include #include "CacheMemory.h" #include "ChannelLayouts.h" +#include "ClipBase.h" #include "Fraction.h" #include "Frame.h" #include "Json.h" @@ -45,16 +49,13 @@ #include #include -using namespace std; - namespace openshot { /** * @brief This struct contains info about a media file, such as height, width, frames per second, etc... * * Each derived class of ReaderBase is responsible for updating this struct to reflect accurate information - * about the streams. Derived classes of ReaderBase should call the InitFileInfo() method to initialize the - * default values of this struct. + * about the streams. */ struct ReaderInfo { @@ -66,24 +67,24 @@ int height; ///< The height of the video (in pixels) int width; ///< The width of the video (in pixesl) int pixel_format; ///< The pixel format (i.e. YUV420P, RGB24, etc...) - Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps) + openshot::Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps) int video_bit_rate; ///< The bit rate of the video stream (in bytes) - Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square) - Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3) - string vcodec; ///< The name of the video codec used to encode / decode the video stream + openshot::Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square) + openshot::Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 
640x480 has a ratio of 4/3) + std::string vcodec; ///< The name of the video codec used to encode / decode the video stream int64_t video_length; ///< The number of frames in the video stream int video_stream_index; ///< The index of the video stream - Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen + openshot::Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen bool interlaced_frame; // Are the contents of this frame interlaced bool top_field_first; // Which interlaced field should be displayed first - string acodec; ///< The name of the audio codec used to encode / decode the video stream + std::string acodec; ///< The name of the audio codec used to encode / decode the video stream int audio_bit_rate; ///< The bit rate of the audio stream (in bytes) int sample_rate; ///< The number of audio samples per second (44100 is a common sample rate) int channels; ///< The number of audio channels used in the audio stream - ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...) + openshot::ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...) int audio_stream_index; ///< The index of the audio stream - Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played - std::map metadata; ///< An optional map/dictionary of metadata for this reader + openshot::Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played + std::map metadata; ///< An optional map/dictionary of metadata for this reader }; /** @@ -91,17 +92,15 @@ * * Readers are types of classes that read video, audio, and image files, and * return openshot::Frame objects. The only requirements for a 'reader', are to - * derive from this base class, implement the GetFrame method, and call the InitFileInfo() method. + * derive from this base class, implement the GetFrame method, and populate ReaderInfo. */ class ReaderBase { protected: /// Section lock for multiple threads - CriticalSection getFrameCriticalSection; - CriticalSection processingCriticalSection; - - int max_width; ///< The maximum image width needed by this clip (used for optimizations) - int max_height; ///< The maximium image height needed by this clip (used for optimizations) + juce::CriticalSection getFrameCriticalSection; + juce::CriticalSection processingCriticalSection; + openshot::ClipBase* parent; public: @@ -109,7 +108,13 @@ ReaderBase(); /// Information about the current media file - ReaderInfo info; + openshot::ReaderInfo info; + + /// Parent clip object of this reader (which can be unparented and NULL) + openshot::ClipBase* GetClip(); + + /// Set parent clip object of this reader + void SetClip(openshot::ClipBase* clip); /// Close the reader (and any resources it was consuming) virtual void Close() = 0; @@ -118,7 +123,7 @@ void DisplayInfo(); /// Get the cache object used by this reader (note: not all readers use cache) - virtual CacheBase* GetCache() = 0; + virtual openshot::CacheBase* GetCache() = 0; /// This method is required for all derived classes of ReaderBase, and returns the /// openshot::Frame object, which contains the image and audio information for that @@ -126,25 +131,24 @@ /// /// @returns The requested frame of video /// @param[in] number The frame number that is requested. 
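[Editor's note: to make the updated pure-virtual surface concrete, a skeletal derived reader is sketched below. Illustrative only, not part of the patch — the class name is hypothetical, the stripped template argument is assumed to be openshot::Frame, and the blank-frame behaviour exists purely for demonstration.]

    // Illustrative only -- not part of this patch.
    #include <memory>
    #include <string>
    #include "ReaderBase.h"
    #include "Frame.h"

    namespace openshot {

    class SolidColorReader : public ReaderBase {     // hypothetical example reader
        bool is_open = false;
    public:
        SolidColorReader() { info.width = 1280; info.height = 720; }  // populate ReaderInfo (minimal)

        void Open() override { is_open = true; }
        void Close() override { is_open = false; }
        bool IsOpen() override { return is_open; }
        std::string Name() override { return "SolidColorReader"; }
        CacheBase* GetCache() override { return nullptr; }            // this reader keeps no cache

        // Every frame number returns a fresh blank frame in this toy example
        std::shared_ptr<Frame> GetFrame(int64_t number) override {
            return std::make_shared<Frame>();
        }

        // Const-correct JSON signatures introduced in this release
        std::string Json() const override { return "{}"; }
        Json::Value JsonValue() const override { return Json::Value(); }
        void SetJson(const std::string value) override {}
        void SetJsonValue(const Json::Value root) override {}
    };

    }  // namespace openshot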
- virtual std::shared_ptr GetFrame(int64_t number) = 0; + virtual std::shared_ptr GetFrame(int64_t number) = 0; /// Determine if reader is open or closed virtual bool IsOpen() = 0; /// Return the type name of the class - virtual string Name() = 0; + virtual std::string Name() = 0; /// Get and Set JSON methods - virtual string Json() = 0; ///< Generate JSON string of this object - virtual void SetJson(string value) = 0; ///< Load JSON string into this object - virtual Json::Value JsonValue() = 0; ///< Generate Json::JsonValue for this object - virtual void SetJsonValue(Json::Value root) = 0; ///< Load Json::JsonValue into this object - - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height) { max_width = width; max_height = height; }; + virtual std::string Json() const = 0; ///< Generate JSON string of this object + virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object + virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object + virtual void SetJsonValue(const Json::Value root) = 0; ///< Load Json::Value into this object /// Open the reader (and start consuming resources, such as images or video files) virtual void Open() = 0; + + virtual ~ReaderBase() = default; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/RendererBase.h libopenshot-0.2.5+dfsg1/include/RendererBase.h --- libopenshot-0.2.2+dfsg1/include/RendererBase.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/RendererBase.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for RendererBase class * @author Duzy Chan * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -47,7 +50,7 @@ public: /// Paint(render) a video Frame. - void paint(const std::shared_ptr & frame); + void paint(const std::shared_ptr & frame); /// Allow manual override of the QWidget that is used to display virtual void OverrideWidget(int64_t qwidget_address) = 0; @@ -55,7 +58,7 @@ protected: RendererBase(); virtual ~RendererBase(); - + virtual void render(std::shared_ptr image) = 0; }; diff -Nru libopenshot-0.2.2+dfsg1/include/Settings.h libopenshot-0.2.5+dfsg1/include/Settings.h --- libopenshot-0.2.2+dfsg1/include/Settings.h 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Settings.h 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,133 @@ +/** + * @file + * @brief Header file for global Settings class + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. 
+ * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_SETTINGS_H +#define OPENSHOT_SETTINGS_H + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "JuceHeader.h" + + +namespace openshot { + + /** + * @brief This class is contains settings used by libopenshot (and can be safely toggled at any point) + * + * Settings class is used primarily to toggle scale settings between preview and rendering, and adjust + * other runtime related settings. + */ + class Settings { + private: + + /// Default constructor + Settings(){}; // Don't allow user to create an instance of this singleton + +#if __GNUC__ >=7 + /// Default copy method + Settings(Settings const&) = delete; // Don't allow the user to assign this instance + + /// Default assignment operator + Settings & operator=(Settings const&) = delete; // Don't allow the user to assign this instance +#else + /// Default copy method + Settings(Settings const&) {}; // Don't allow the user to assign this instance + + /// Default assignment operator + Settings & operator=(Settings const&); // Don't allow the user to assign this instance +#endif + + /// Private variable to keep track of singleton instance + static Settings * m_pInstance; + + public: + /** + * @brief Use video codec for faster video decoding (if supported) + * + * 0 - No acceleration, + * 1 - Linux VA-API, + * 2 - nVidia NVDEC, + * 3 - Windows D3D9, + * 4 - Windows D3D11, + * 5 - MacOS / VideoToolBox, + * 6 - Linux VDPAU, + * 7 - Intel QSV + */ + int HARDWARE_DECODER = 0; + + /// Scale mode used in FFmpeg decoding and encoding (used as an optimization for faster previews) + bool HIGH_QUALITY_SCALING = false; + + /// Maximum width for image data (useful for optimzing for a smaller preview or render) + int MAX_WIDTH = 0; + + /// Maximum height for image data (useful for optimzing for a smaller preview or render) + int MAX_HEIGHT = 0; + + /// Wait for OpenMP task to finish before continuing (used to limit threads on slower systems) + bool WAIT_FOR_VIDEO_PROCESSING_TASK = false; + + /// Number of threads of OpenMP + int OMP_THREADS = 12; + + /// Number of threads that ffmpeg uses + int FF_THREADS = 8; + + /// Maximum rows that hardware decode can handle + int DE_LIMIT_HEIGHT_MAX = 1100; + + /// Maximum columns that hardware decode can handle + int DE_LIMIT_WIDTH_MAX = 1950; + + /// Which GPU to use to decode (0 is the first) + int HW_DE_DEVICE_SET = 0; + + /// Which GPU to use to encode (0 is the first) + int HW_EN_DEVICE_SET = 0; + + /// The audio device name to use during playback + std::string PLAYBACK_AUDIO_DEVICE_NAME = ""; + + /// Create or get an instance of this logger singleton (invoke the class with this method) + static Settings * Instance(); + }; + +} + +#endif diff -Nru libopenshot-0.2.2+dfsg1/include/Tests.h libopenshot-0.2.5+dfsg1/include/Tests.h --- libopenshot-0.2.2+dfsg1/include/Tests.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Tests.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,35 +0,0 @@ -/** - * @file - * @brief Header file for UnitTests - * @author Jonathan Thomas - * - * @section LICENSE - * - * Copyright (c) 
2008-2014 OpenShot Studios, LLC - * . This file is part of - * OpenShot Library (libopenshot), an open-source project dedicated to - * delivering high quality video editing and animation solutions to the - * world. For more information visit . - * - * OpenShot Library (libopenshot) is free software: you can redistribute it - * and/or modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * OpenShot Library (libopenshot) is distributed in the hope that it will be - * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with OpenShot Library. If not, see . - */ - -#ifndef OPENSHOT_UNITTESTS_H -#define OPENSHOT_UNITTESTS_H - - #ifndef TEST_MEDIA_PATH - #define TEST_MEDIA_PATH "../../src/examples/" - #endif - -#endif diff -Nru libopenshot-0.2.2+dfsg1/include/TextReader.h libopenshot-0.2.5+dfsg1/include/TextReader.h --- libopenshot-0.2.2+dfsg1/include/TextReader.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/TextReader.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for TextReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -28,6 +31,9 @@ #ifndef OPENSHOT_TEXT_READER_H #define OPENSHOT_TEXT_READER_H +// Require ImageMagick support +#ifdef USE_IMAGEMAGICK + #include "ReaderBase.h" #include @@ -36,12 +42,10 @@ #include #include #include -#include "Magick++.h" #include "CacheMemory.h" #include "Enums.h" #include "Exceptions.h" - -using namespace std; +#include "MagickUtilities.h" namespace openshot { @@ -85,15 +89,16 @@ int height; int x_offset; int y_offset; - string text; - string font; + std::string text; + std::string font; double size; - string text_color; - string background_color; + std::string text_color; + std::string background_color; + std::string text_background_color; std::shared_ptr image; - list lines; + MAGICK_DRAWABLE lines; bool is_open; - GravityType gravity; + openshot::GravityType gravity; public: @@ -110,33 +115,37 @@ /// @param font The font of the text /// @param size The size of the text /// @param text_color The color of the text - /// @param background_color The background color of the text (also supports Transparent) - TextReader(int width, int height, int x_offset, int y_offset, GravityType gravity, string text, string font, double size, string text_color, string background_color); + /// @param background_color The background color of the text frame image (also supports Transparent) + TextReader(int width, int height, int x_offset, int y_offset, GravityType gravity, std::string text, std::string font, double size, std::string text_color, std::string background_color); + + /// Draw a box under rendered text using the specified color. 
+ /// @param color The background color behind the text + void SetTextBackgroundColor(std::string color); /// Close Reader void Close(); /// Get the cache object used by this reader (always returns NULL for this object) - CacheMemory* GetCache() { return NULL; }; + openshot::CacheMemory* GetCache() { return NULL; }; /// Get an openshot::Frame object for a specific frame number of this reader. All numbers /// return the same Frame, since they all share the same image data. /// /// @returns The requested frame (containing the image) /// @param requested_frame The frame number that is requested. - std::shared_ptr GetFrame(int64_t requested_frame); + std::shared_ptr GetFrame(int64_t requested_frame); /// Determine if reader is open or closed bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "TextReader"; }; + std::string Name() { return "TextReader"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Open Reader - which is called by the constructor automatically void Open(); @@ -144,4 +153,5 @@ } -#endif +#endif //USE_IMAGEMAGICK +#endif //OPENSHOT_TEXT_READER_H diff -Nru libopenshot-0.2.2+dfsg1/include/Timeline.h libopenshot-0.2.5+dfsg1/include/Timeline.h --- libopenshot-0.2.2+dfsg1/include/Timeline.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Timeline.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for Timeline class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -30,6 +33,7 @@ #include #include +#include #include #include #include "CacheBase.h" @@ -48,9 +52,7 @@ #include "KeyFrame.h" #include "OpenMPUtilities.h" #include "ReaderBase.h" - -using namespace std; -using namespace openshot; +#include "Settings.h" namespace openshot { @@ -99,6 +101,7 @@ * Fraction(25,1), // framerate * 44100, // sample rate * 2 // channels + * ChannelLayout::LAYOUT_STEREO, * ); * * // Create some clips @@ -146,11 +149,13 @@ private: bool is_open; /// clips; /// closing_clips; /// open_clips; /// effects; /// clips; /// closing_clips; /// open_clips; /// effects; /// allocated_frame_mappers; ///< all the frame mappers we allocated and must free + bool managed_cache; ///< Does this timeline instance manage the cache object /// Process a new layer of video or audio void add_layer(std::shared_ptr new_frame, Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number, bool is_top_clip, float max_volume); @@ -173,7 +178,7 @@ /// @param requested_frame The frame number that is requested. 
/// @param number_of_frames The number of frames to check /// @param include Include or Exclude intersecting clips - vector find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include); + std::vector find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include); /// Get or generate a blank frame std::shared_ptr GetOrCreateFrame(Clip* clip, int64_t number); @@ -204,6 +209,8 @@ /// @param channel_layout The channel layout (i.e. mono, stereo, 3 point surround, etc...) Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout); + virtual ~Timeline(); + /// @brief Add an openshot::Clip to the timeline /// @param clip Add an openshot::Clip to the timeline. A clip can contain any type of Reader. void AddClip(Clip* clip); @@ -225,18 +232,19 @@ void ClearAllCache(); /// Return a list of clips on the timeline - list Clips() { return clips; }; + std::list Clips() { return clips; }; /// Close the timeline reader (and any resources it was consuming) void Close(); /// Return the list of effects on the timeline - list Effects() { return effects; }; + std::list Effects() { return effects; }; /// Get the cache object used by this reader CacheBase* GetCache() { return final_cache; }; - /// Get the cache object used by this reader + /// Set the cache object used by this reader. You must now manage the lifecycle + /// of this cache object though (Timeline will not delete it for you). void SetCache(CacheBase* new_cache); /// Get an openshot::Frame object for a specific frame number of this timeline. @@ -257,19 +265,23 @@ bool IsOpen() { return is_open; }; /// Return the type name of the class - string Name() { return "Timeline"; }; + std::string Name() { return "Timeline"; }; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - void SetJson(string value); ///< Load JSON string into this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value); ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object + + /// Set Max Image Size (used for performance optimization). Convenience function for setting + /// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT. + void SetMaxSize(int width, int height); /// @brief Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) /// This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync /// with another application... such as OpenShot Video Editor (http://www.openshot.org). /// @param value A JSON string containing a key, value, and type of change. 
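[Editor's note: the new cache-ownership rule and the Settings-backed SetMaxSize() described above might be exercised as sketched below. Illustrative only, not part of the patch — CacheMemory is used as an example cache and the clip path is a placeholder.]

    // Illustrative only -- not part of this patch.
    #include "Timeline.h"
    #include "CacheMemory.h"
    #include "Clip.h"
    #include "Settings.h"

    void build_timeline()
    {
        // Constructor now takes an explicit channel layout (new trailing argument)
        openshot::Timeline timeline(1920, 1080, openshot::Fraction(30, 1),
                                    44100, 2, openshot::LAYOUT_STEREO);

        // SetCache() no longer transfers ownership: the caller must keep the cache
        // alive for as long as the timeline uses it, and delete it afterwards.
        openshot::CacheMemory* cache = new openshot::CacheMemory();
        timeline.SetCache(cache);

        // Convenience wrapper around Settings::Instance()->MAX_WIDTH / MAX_HEIGHT,
        // typically used to produce smaller preview frames.
        timeline.SetMaxSize(960, 540);
        openshot::Settings::Instance()->HIGH_QUALITY_SCALING = false;

        openshot::Clip clip("/path/to/clip.mp4");   // placeholder path
        timeline.AddClip(&clip);
        timeline.Open();
        auto frame = timeline.GetFrame(1);          // pull one composited frame
        timeline.Close();

        delete cache;   // caller-managed lifetime (see SetCache note above)
    }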
- void ApplyJsonDiff(string value); + void ApplyJsonDiff(std::string value); /// Open the reader (and start consuming resources) void Open(); diff -Nru libopenshot-0.2.2+dfsg1/include/Version.h libopenshot-0.2.5+dfsg1/include/Version.h --- libopenshot-0.2.2+dfsg1/include/Version.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/Version.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,77 +0,0 @@ -/** - * @file - * @brief Header file that includes the version number of libopenshot - * @author Jonathan Thomas - * - * @section LICENSE - * - * Copyright (c) 2008-2014 OpenShot Studios, LLC - * . This file is part of - * OpenShot Library (libopenshot), an open-source project dedicated to - * delivering high quality video editing and animation solutions to the - * world. For more information visit . - * - * OpenShot Library (libopenshot) is free software: you can redistribute it - * and/or modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * OpenShot Library (libopenshot) is distributed in the hope that it will be - * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with OpenShot Library. If not, see . - */ - -#ifndef OPENSHOT_VERSION_H -#define OPENSHOT_VERSION_H - -// Crazy c++ macro to convert an integer into a string -#ifndef STRINGIZE - #define STRINGIZE_(x) #x - #define STRINGIZE(x) STRINGIZE_(x) -#endif - -#define OPENSHOT_VERSION_MAJOR 0; /// Major version number is incremented when huge features are added or improved. -#define OPENSHOT_VERSION_MINOR 2; /// Minor version is incremented when smaller (but still very important) improvements are added. -#define OPENSHOT_VERSION_BUILD 2; /// Build number is incremented when minor bug fixes and less important improvements are added. -#define OPENSHOT_VERSION_SO 16; /// Shared object version number. This increments any time the API and ABI changes (so old apps will no longer link) -#define OPENSHOT_VERSION_MAJOR_MINOR STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR); /// A string of the "Major.Minor" version -#define OPENSHOT_VERSION_ALL STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR) "." STRINGIZE(OPENSHOT_VERSION_BUILD); /// A string of the entire version "Major.Minor.Build" - -#include -using namespace std; - -namespace openshot -{ - /// This struct holds version number information. Use the GetVersion() method to access the current version of libopenshot. - struct OpenShotVersion { - int major; /// Major version number - int minor; /// Minor version number - int build; /// Build number - int so; /// Shared Object Number (incremented when API or ABI changes) - - /// Get a string version of the version (i.e. "Major.Minor.Build") - string ToString() { - stringstream version_string; - version_string << major << "." << minor << "." 
<< build; - return version_string.str(); - } - }; - - /// Get the current version number of libopenshot (major, minor, and build number) - static OpenShotVersion GetVersion() { - OpenShotVersion version; - - // Set version info - version.major = OPENSHOT_VERSION_MAJOR; - version.minor = OPENSHOT_VERSION_MINOR; - version.build = OPENSHOT_VERSION_BUILD; - version.so = OPENSHOT_VERSION_SO; - - return version; - } -} -#endif diff -Nru libopenshot-0.2.2+dfsg1/include/WriterBase.h libopenshot-0.2.5+dfsg1/include/WriterBase.h --- libopenshot-0.2.2+dfsg1/include/WriterBase.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/WriterBase.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for WriterBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -36,8 +39,6 @@ #include "ReaderBase.h" #include "ZmqLogger.h" -using namespace std; - namespace openshot { /** @@ -56,24 +57,24 @@ int height; ///< The height of the video (in pixels) int width; ///< The width of the video (in pixels) int pixel_format; ///< The pixel format (i.e. YUV420P, RGB24, etc...) - Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps) + openshot::Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps) int video_bit_rate; ///< The bit rate of the video stream (in bytes) - Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square) - Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3) - string vcodec; ///< The name of the video codec used to encode / decode the video stream + openshot::Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square) + openshot::Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3) + std::string vcodec; ///< The name of the video codec used to encode / decode the video stream int64_t video_length; ///< The number of frames in the video stream int video_stream_index; ///< The index of the video stream - Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen + openshot::Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen bool interlaced_frame; ///< Are the contents of this frame interlaced bool top_field_first; ///< Which interlaced field should be displayed first - string acodec; ///< The name of the audio codec used to encode / decode the video stream + std::string acodec; ///< The name of the audio codec used to encode / decode the video stream int audio_bit_rate; ///< The bit rate of the audio stream (in bytes) int sample_rate; ///< The number of audio samples per second (44100 is a common sample rate) int channels; ///< The number of audio channels used in the audio stream - ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...) + openshot::ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...) 
int audio_stream_index; ///< The index of the audio stream - Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played - std::map metadata; ///< An optional map/dictionary of video & audio metadata + openshot::Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played + std::map metadata; ///< An optional map/dictionary of video & audio metadata }; /** @@ -94,28 +95,30 @@ /// @brief This method copy's the info struct of a reader, and sets the writer with the same info /// @param reader The source reader to copy - void CopyReaderInfo(ReaderBase* reader); + void CopyReaderInfo(openshot::ReaderBase* reader); /// Determine if writer is open or closed virtual bool IsOpen() = 0; /// This method is required for all derived classes of WriterBase. Write a Frame to the video file. - virtual void WriteFrame(std::shared_ptr frame) = 0; + virtual void WriteFrame(std::shared_ptr frame) = 0; /// This method is required for all derived classes of WriterBase. Write a block of frames from a reader. - virtual void WriteFrame(ReaderBase* reader, int64_t start, int64_t length) = 0; + virtual void WriteFrame(openshot::ReaderBase* reader, int64_t start, int64_t length) = 0; /// Get and Set JSON methods - string Json(); ///< Generate JSON string of this object - Json::Value JsonValue(); ///< Generate Json::JsonValue for this object - void SetJson(string value); ///< Load JSON string into this object - void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + std::string Json() const; ///< Generate JSON string of this object + Json::Value JsonValue() const; ///< Generate Json::Value for this object + void SetJson(const std::string value); ///< Load JSON string into this object + void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object /// Display file information in the standard output stream (stdout) void DisplayInfo(); /// Open the writer (and start initializing streams) virtual void Open() = 0; + + virtual ~WriterBase() = default; }; } diff -Nru libopenshot-0.2.2+dfsg1/include/ZmqLogger.h libopenshot-0.2.5+dfsg1/include/ZmqLogger.h --- libopenshot-0.2.2+dfsg1/include/ZmqLogger.h 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/include/ZmqLogger.h 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Header file for ZeroMQ-based Logger class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -29,7 +32,6 @@ #define OPENSHOT_LOGGER_H -#include "JuceLibraryCode/JuceHeader.h" #include #include #include @@ -40,27 +42,25 @@ #include #include #include +#include "JuceHeader.h" -using namespace std; - namespace openshot { /** - * @brief This abstract class is the base class, used by all readers in libopenshot. + * @brief This class is used for logging and sending those logs over a ZemoMQ socket to a listener * - * Readers are types of classes that read video, audio, and image files, and - * return openshot::Frame objects. The only requirements for a 'reader', are to - * derive from this base class, implement the GetFrame method, and call the InitFileInfo() method. + * OpenShot desktop editor listens to this port, to receive libopenshot debug output. 
It both logs to + * a file and sends the stdout over a socket. */ class ZmqLogger { private: - CriticalSection loggerCriticalSection; - string connection; + juce::CriticalSection loggerCriticalSection; + std::string connection; // Logfile related vars - string file_path; - ofstream log_file; + std::string file_path; + std::ofstream log_file; bool enabled; /// ZMQ Context @@ -94,30 +94,31 @@ static ZmqLogger * Instance(); /// Append debug information - void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, - string arg2_name, float arg2_value, - string arg3_name, float arg3_value, - string arg4_name, float arg4_value, - string arg5_name, float arg5_value, - string arg6_name, float arg6_value); + void AppendDebugMethod(std::string method_name, + std::string arg1_name="", float arg1_value=-1.0, + std::string arg2_name="", float arg2_value=-1.0, + std::string arg3_name="", float arg3_value=-1.0, + std::string arg4_name="", float arg4_value=-1.0, + std::string arg5_name="", float arg5_value=-1.0, + std::string arg6_name="", float arg6_value=-1.0); /// Close logger (sockets and/or files) void Close(); /// Set or change connection info for logger (i.e. tcp://*:5556) - void Connection(string new_connection); + void Connection(std::string new_connection); /// Enable/Disable logging void Enable(bool is_enabled) { enabled = is_enabled;}; /// Set or change the file path (optional) - void Path(string new_path); + void Path(std::string new_path); /// Log message to all subscribers of this logger (if any) - void Log(string message); + void Log(std::string message); /// Log message to a file (if path set) - void LogToFile(string message); + void LogToFile(std::string message); }; } diff -Nru libopenshot-0.2.2+dfsg1/INSTALL.md libopenshot-0.2.5+dfsg1/INSTALL.md --- libopenshot-0.2.2+dfsg1/INSTALL.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/INSTALL.md 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,153 @@ +## Detailed Install Instructions + +Operating system specific install instructions are located in: + +* doc/INSTALL-LINUX.md +* doc/INSTALL-MAC.md +* doc/INSTALL-WINDOWS.md + +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for dependencies, +locates header files and libraries, generates makefiles, and supports the cross-platform compiling of +libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where all temporary build +files, such as makefiles, object files, and even the final binaries, are created outside of the source +code folder, inside a /build/ sub-folder. This prevents the build process from cluttering up the source +code. These instructions have only been tested with the GNU compiler (including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. 
Libraries and Executables have been labeled in the +list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is compiled. Some of these flags might be required when compiling on certain OSes, just depending on how your build environment is setup. 
To add a build flag, follow this general syntax: $ cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../ + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Linux Build Instructions (libopenshot-audio) +To compile libopenshot-audio, we need to go through a few additional steps to manually build and install it. Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake ../ +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Linux Build Instructions (libopenshot) +Run the following commands to compile libopenshot: + +``` +cd [libopenshot repo directory] +mkdir -p build +cd build +cmake ../ +make +make install +``` + +For more detailed instructions, please see: + +* doc/INSTALL-LINUX.md +* doc/INSTALL-MAC.md +* doc/INSTALL-WINDOWS.md diff -Nru libopenshot-0.2.2+dfsg1/.project libopenshot-0.2.5+dfsg1/.project --- libopenshot-0.2.2+dfsg1/.project 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/.project 1970-01-01 00:00:00.000000000 +0000 @@ -1,94 +0,0 @@ - - - libopenshot - - - - - - com.aptana.ide.core.unifiedBuilder - - - - - org.python.pydev.PyDevBuilder - - - - - org.eclipse.cdt.managedbuilder.core.genmakebuilder - clean,full,incremental, - - - ?name? 
- - - - org.eclipse.cdt.make.core.append_environment - true - - - org.eclipse.cdt.make.core.autoBuildTarget - all - - - org.eclipse.cdt.make.core.buildArguments - - - - org.eclipse.cdt.make.core.buildCommand - make - - - org.eclipse.cdt.make.core.buildLocation - ${workspace_loc:/libopenshot/build} - - - org.eclipse.cdt.make.core.cleanBuildTarget - clean - - - org.eclipse.cdt.make.core.contents - org.eclipse.cdt.make.core.activeConfigSettings - - - org.eclipse.cdt.make.core.enableAutoBuild - false - - - org.eclipse.cdt.make.core.enableCleanBuild - true - - - org.eclipse.cdt.make.core.enableFullBuild - true - - - org.eclipse.cdt.make.core.fullBuildTarget - all - - - org.eclipse.cdt.make.core.stopOnError - true - - - org.eclipse.cdt.make.core.useDefaultBuildCmd - true - - - - - org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder - - - - - - org.eclipse.cdt.core.cnature - org.eclipse.cdt.core.ccnature - org.eclipse.cdt.managedbuilder.core.managedBuildNature - org.eclipse.cdt.managedbuilder.core.ScannerConfigNature - org.python.pydev.pythonNature - com.aptana.ruby.core.rubynature - - diff -Nru libopenshot-0.2.2+dfsg1/README libopenshot-0.2.5+dfsg1/README --- libopenshot-0.2.2+dfsg1/README 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -#################################################################### - OpenShot Library -#################################################################### - -OpenShot Library (libopenshot) is an open-source project dedicated to -delivering high quality video editing, animation, and playback solutions -to the world. For more information visit . - -#################################################################### - License -#################################################################### - -Copyright (c) 2008-2014 OpenShot Studios, LLC -. - -OpenShot Library (libopenshot) is free software: you can redistribute it -and/or modify it under the terms of the GNU Lesser General Public License -as published by the Free Software Foundation, either version 3 of the -License, or (at your option) any later version. - -OpenShot Library (libopenshot) is distributed in the hope that it will be -useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with OpenShot Library. If not, see . - -#################################################################### - - To release a closed-source product which uses libopenshot (i.e. video - editing and playback), commercial licenses are available: contact - sales@openshot.org for more information. - - -#################################################################### - Install -#################################################################### - -Please see /doc/InstallationGuide.pdf for a very detailed -Linux, Mac, and Windows compiling instruction guide. 
An online version -is also available: -https://docs.google.com/document/d/1V6nq-IuS9zxqO1-OSt8iTS_cw_HMCpsUNofHLYtUNjM/pub - - -#################################################################### - Documentation -#################################################################### - -Documentation is auto-generated by Doxygen, and can be created with -$ make doc (Also available online: ) - - -#################################################################### - Authors -#################################################################### - -Please see AUTHORS file for a full list of authors. - - -#################################################################### - www.openshot.org | www.openshotstudios.com -#################################################################### - - Copyright (c) 2008-2014 OpenShot Studios, LLC - . diff -Nru libopenshot-0.2.2+dfsg1/README.md libopenshot-0.2.5+dfsg1/README.md --- libopenshot-0.2.2+dfsg1/README.md 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/README.md 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,92 @@ +OpenShot Video Library (libopenshot) is a free, open-source C++ library dedicated to +delivering high quality video editing, animation, and playback solutions to the +world. + +## Build Status + +[![Build Status](https://img.shields.io/travis/OpenShot/libopenshot/develop.svg?label=libopenshot)](https://travis-ci.org/OpenShot/libopenshot) [![Build Status](https://img.shields.io/travis/OpenShot/libopenshot-audio/develop.svg?label=libopenshot-audio)](https://travis-ci.org/OpenShot/libopenshot-audio) + +## Features + +* Cross-Platform (Linux, Mac, and Windows) +* Multi-Layer Compositing +* Video and Audio Effects (Chroma Key, Color Adjustment, Grayscale, etc…) +* Animation Curves (Bézier, Linear, Constant) +* Time Mapping (Curve-based Slow Down, Speed Up, Reverse) +* Audio Mixing & Resampling (Curve-based) +* Audio Plug-ins (VST & AU) +* Audio Drivers (ASIO, WASAPI, DirectSound, CoreAudio, iPhone Audio, ALSA, JACK, and Android) +* Telecine and Inverse Telecine (Film to TV, TV to Film) +* Frame Rate Conversions +* Multi-Processor Support (Performance) +* Python and Ruby Bindings (All Features Supported) +* Qt Video Player Included (Ability to display video on any QWidget) +* Unit Tests (Stability) +* All FFmpeg Formats and Codecs Supported (Images, Videos, and Audio files) +* Full Documentation with Examples (Doxygen Generated) + +## Install + +Detailed instructions for building libopenshot and libopenshot-audio for each OS. These instructions +are also available in the /docs/ source folder. + + * [Linux](https://github.com/OpenShot/libopenshot/wiki/Linux-Build-Instructions) + * [Mac](https://github.com/OpenShot/libopenshot/wiki/Mac-Build-Instructions) + * [Windows](https://github.com/OpenShot/libopenshot/wiki/Windows-Build-Instructions) + +## Hardware Acceleration + +OpenShot now supports experimental hardware acceleration, both for encoding and +decoding videos. When enabled, this can either speed up those operations or slow +them down, depending on the power and features supported by your graphics card. +Please see [doc/HW-ACCELL.md](doc/HW-ACCEL.md) for more information. + +## Documentation + +Beautiful HTML documentation can be generated using Doxygen. +``` +make doc +``` +(Also available online: http://openshot.org/files/libopenshot/) + +## Developers + +Are you interested in becoming more involved in the development of +OpenShot? Build exciting new features, fix bugs, make friends, and become a hero! 
+Please read the [step-by-step](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer) +instructions for getting source code, configuring dependencies, and building OpenShot. + +## Report a bug + +You can report a new libopenshot issue directly on GitHub: + +https://github.com/OpenShot/libopenshot/issues + +## Websites + +- https://www.openshot.org/ (Official website and blog) +- https://github.com/OpenShot/libopenshot/ (source code and issue tracker) +- https://github.com/OpenShot/libopenshot-audio/ (source code for audio library) +- https://github.com/OpenShot/openshot-qt/ (source code for Qt client) +- https://launchpad.net/openshot/ + +### License + +Copyright (c) 2008-2019 OpenShot Studios, LLC. + +OpenShot Library (libopenshot) is free software: you can redistribute it +and/or modify it under the terms of the GNU Lesser General Public License +as published by the Free Software Foundation, either version 3 of the +License, or (at your option) any later version. + +OpenShot Library (libopenshot) is distributed in the hope that it will be +useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with OpenShot Library. If not, see http://www.gnu.org/licenses/. + +To release a closed-source product which uses libopenshot (i.e. video +editing and playback), commercial licenses are also available: contact +sales@openshot.org for more information. diff -Nru libopenshot-0.2.2+dfsg1/src/AudioBufferSource.cpp libopenshot-0.2.5+dfsg1/src/AudioBufferSource.cpp --- libopenshot-0.2.2+dfsg1/src/AudioBufferSource.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/AudioBufferSource.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for AudioBufferSource class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -31,7 +34,7 @@ using namespace openshot; // Default constructor -AudioBufferSource::AudioBufferSource(AudioSampleBuffer *audio_buffer) +AudioBufferSource::AudioBufferSource(juce::AudioSampleBuffer *audio_buffer) : position(0), start(0), repeat(false), buffer(audio_buffer) { } @@ -40,10 +43,10 @@ { // forget the AudioSampleBuffer. It still exists; we just don't know about it. 
buffer = NULL; -}; +} // Get the next block of audio samples -void AudioBufferSource::getNextAudioBlock (const AudioSourceChannelInfo& info) +void AudioBufferSource::getNextAudioBlock (const juce::AudioSourceChannelInfo& info) { int buffer_samples = buffer->getNumSamples(); int buffer_channels = buffer->getNumChannels(); @@ -95,7 +98,7 @@ void AudioBufferSource::releaseResources() { } // Set the next read position of this source -void AudioBufferSource::setNextReadPosition (int64 newPosition) +void AudioBufferSource::setNextReadPosition (juce::int64 newPosition) { // set position (if the new position is in range) if (newPosition >= 0 && newPosition < buffer->getNumSamples()) @@ -103,14 +106,14 @@ } // Get the next read position of this source -int64 AudioBufferSource::getNextReadPosition() const +juce::int64 AudioBufferSource::getNextReadPosition() const { // return the next read position return position; } // Get the total length (in samples) of this audio source -int64 AudioBufferSource::getTotalLength() const +juce::int64 AudioBufferSource::getTotalLength() const { // Get the length return buffer->getNumSamples(); @@ -131,7 +134,7 @@ } // Use a different AudioSampleBuffer for this source -void AudioBufferSource::setBuffer (AudioSampleBuffer *audio_buffer) +void AudioBufferSource::setBuffer (juce::AudioSampleBuffer *audio_buffer) { buffer = audio_buffer; setNextReadPosition(0); diff -Nru libopenshot-0.2.2+dfsg1/src/AudioReaderSource.cpp libopenshot-0.2.5+dfsg1/src/AudioReaderSource.cpp --- libopenshot-0.2.2+dfsg1/src/AudioReaderSource.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/AudioReaderSource.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for AudioReaderSource class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
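/* Usage sketch for the AudioBufferSource hunks above (illustrative only): the change
 * fully qualifies the JUCE types (juce::AudioSampleBuffer, juce::AudioSourceChannelInfo,
 * juce::int64) instead of relying on a blanket `using namespace juce`. The header name
 * below is an assumption.
 */
#include "AudioBufferSource.h"  // assumed libopenshot header
#include <iostream>

void audio_buffer_source_sketch()
{
    // Two channels, one second of 44.1 kHz audio, zero-filled
    juce::AudioSampleBuffer samples(2, 44100);
    samples.clear();

    // Wrap the JUCE buffer in an openshot source and position it at the first sample
    openshot::AudioBufferSource source(&samples);
    source.setNextReadPosition(0);

    std::cout << "total samples: " << source.getTotalLength() << std::endl;
}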
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -48,7 +51,7 @@ // Clear and delete the buffer delete buffer; buffer = NULL; -}; +} // Get more samples from the reader void AudioReaderSource::GetMoreSamplesFromReader() @@ -63,7 +66,7 @@ } // Debug - ZmqLogger::Instance()->AppendDebugMethod("AudioReaderSource::GetMoreSamplesFromReader", "amount_needed", amount_needed, "amount_remaining", amount_remaining, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("AudioReaderSource::GetMoreSamplesFromReader", "amount_needed", amount_needed, "amount_remaining", amount_remaining); // Init estimated buffer equal to the current frame position (before getting more samples) estimated_frame = frame_number; @@ -146,10 +149,10 @@ int channels = buffer->getNumChannels(); // Debug - ZmqLogger::Instance()->AppendDebugMethod("AudioReaderSource::reverse_buffer", "number_of_samples", number_of_samples, "channels", channels, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("AudioReaderSource::reverse_buffer", "number_of_samples", number_of_samples, "channels", channels); // Reverse array (create new buffer to hold the reversed version) - AudioSampleBuffer *reversed = new juce::AudioSampleBuffer(channels, number_of_samples); + juce::AudioSampleBuffer *reversed = new juce::AudioSampleBuffer(channels, number_of_samples); reversed->clear(); for (int channel = 0; channel < channels; channel++) @@ -174,7 +177,7 @@ } // Get the next block of audio samples -void AudioReaderSource::getNextAudioBlock(const AudioSourceChannelInfo& info) +void AudioReaderSource::getNextAudioBlock(const juce::AudioSourceChannelInfo& info) { int buffer_samples = buffer->getNumSamples(); int buffer_channels = buffer->getNumChannels(); @@ -245,7 +248,7 @@ void AudioReaderSource::releaseResources() { } // Set the next read position of this source -void AudioReaderSource::setNextReadPosition (int64 newPosition) +void AudioReaderSource::setNextReadPosition (juce::int64 newPosition) { // set position (if the new position is in range) if (newPosition >= 0 && newPosition < buffer->getNumSamples()) @@ -253,14 +256,14 @@ } // Get the next read position of this source -int64 AudioReaderSource::getNextReadPosition() const +juce::int64 AudioReaderSource::getNextReadPosition() const { // return the next read position return position; } // Get the total length (in samples) of this audio source -int64 AudioReaderSource::getTotalLength() const +juce::int64 AudioReaderSource::getTotalLength() const { // Get the length if (reader) @@ -284,7 +287,7 @@ } // Update the internal buffer used by this source -void AudioReaderSource::setBuffer (AudioSampleBuffer *audio_buffer) +void AudioReaderSource::setBuffer (juce::AudioSampleBuffer *audio_buffer) { buffer = audio_buffer; setNextReadPosition(0); diff -Nru libopenshot-0.2.2+dfsg1/src/AudioResampler.cpp libopenshot-0.2.5+dfsg1/src/AudioResampler.cpp --- libopenshot-0.2.2+dfsg1/src/AudioResampler.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/AudioResampler.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for AudioResampler class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
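/* Illustrative sketch of the simplified logging calls above: now that
 * ZmqLogger::AppendDebugMethod() declares default arguments (see the ZmqLogger.h hunk
 * earlier in this diff), callers pass only the name/value pairs they actually have,
 * instead of padding with "" and -1. The include path is an assumption.
 */
#include "ZmqLogger.h"  // assumed libopenshot header
#include <cstdint>

void logger_sketch(int64_t requested_frame)
{
    openshot::ZmqLogger* log = openshot::ZmqLogger::Instance();
    log->Enable(true);

    // Remaining parameters default to "" / -1.0, so a single pair is enough
    log->AppendDebugMethod("MyReader::GetFrame", "requested_frame", requested_frame);
}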
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -46,10 +49,10 @@ buffer_source = new AudioBufferSource(buffer); // Init resampling source - resample_source = new ResamplingAudioSource(buffer_source, false, 2); + resample_source = new juce::ResamplingAudioSource(buffer_source, false, 2); // Init resampled buffer - resampled_buffer = new AudioSampleBuffer(2, 1); + resampled_buffer = new juce::AudioSampleBuffer(2, 1); resampled_buffer->clear(); // Init callback buffer @@ -71,12 +74,12 @@ } // Sets the audio buffer and updates the key settings -void AudioResampler::SetBuffer(AudioSampleBuffer *new_buffer, double sample_rate, double new_sample_rate) +void AudioResampler::SetBuffer(juce::AudioSampleBuffer *new_buffer, double sample_rate, double new_sample_rate) { if (sample_rate <= 0) - sample_rate == 44100; + sample_rate = 44100; if (new_sample_rate <= 0) - new_sample_rate == 44100; + new_sample_rate = 44100; // Set the sample ratio (the ratio of sample rate change) source_ratio = sample_rate / new_sample_rate; @@ -86,7 +89,7 @@ } // Sets the audio buffer and key settings -void AudioResampler::SetBuffer(AudioSampleBuffer *new_buffer, double ratio) +void AudioResampler::SetBuffer(juce::AudioSampleBuffer *new_buffer, double ratio) { // Update buffer & buffer source buffer = new_buffer; @@ -117,7 +120,7 @@ } // Get the resampled audio buffer -AudioSampleBuffer* AudioResampler::GetResampledBuffer() +juce::AudioSampleBuffer* AudioResampler::GetResampledBuffer() { // Resample the current frame's audio buffer (into the temp callback buffer) resample_source->getNextAudioBlock(resample_callback_buffer); diff -Nru libopenshot-0.2.2+dfsg1/src/bindings/CMakeLists.txt libopenshot-0.2.5+dfsg1/src/bindings/CMakeLists.txt --- libopenshot-0.2.2+dfsg1/src/bindings/CMakeLists.txt 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/bindings/CMakeLists.txt 2020-03-03 08:00:06.000000000 +0000 @@ -4,7 +4,7 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of # OpenShot Library (libopenshot), an open-source project dedicated to # delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/bindings/python/CMakeLists.txt libopenshot-0.2.5+dfsg1/src/bindings/python/CMakeLists.txt --- libopenshot-0.2.2+dfsg1/src/bindings/python/CMakeLists.txt 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/bindings/python/CMakeLists.txt 2020-03-03 08:00:06.000000000 +0000 @@ -4,10 +4,10 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of -# OpenShot Library (libopenshot), an open-source project dedicated to -# delivering high quality video editing and animation solutions to the +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the # world. For more information visit . 
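/* Illustrative sketch for the AudioResampler hunks above, which fix the accidental
 * comparisons (`sample_rate == 44100;`) that were meant to be assignments and qualify
 * the JUCE types. The default constructor and header name are assumptions.
 */
#include "AudioResampler.h"  // assumed libopenshot header

juce::AudioSampleBuffer* resample_sketch(juce::AudioSampleBuffer* input)
{
    openshot::AudioResampler resampler;

    // Resample 44.1 kHz material to 48 kHz; non-positive rates now fall back to 44100
    resampler.SetBuffer(input, 44100.0, 48000.0);

    // The returned buffer is owned by the resampler (per the hunks above), so don't delete it here
    return resampler.GetResampledBuffer();
}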
# # OpenShot Library (libopenshot) is free software: you can redistribute it @@ -26,40 +26,81 @@ ############### SWIG PYTHON BINDINGS ################ -FIND_PACKAGE(SWIG 2.0 REQUIRED) +FIND_PACKAGE(SWIG 3.0 REQUIRED) INCLUDE(${SWIG_USE_FILE}) -FIND_PACKAGE(PythonLibs 3) +### Enable some legacy SWIG behaviors, in newer CMAKEs +if (POLICY CMP0078) + cmake_policy(SET CMP0078 OLD) +endif() +if (POLICY CMP0086) + cmake_policy(SET CMP0086 OLD) +endif() + FIND_PACKAGE(PythonInterp 3) -IF (PYTHONLIBS_FOUND) - IF (PYTHONINTERP_FOUND) +FIND_PACKAGE(PythonLibs 3) +if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) - ### Include Python header files - INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_PATH}) - INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}) - - ### Enable C++ support in SWIG - SET_SOURCE_FILES_PROPERTIES(openshot.i PROPERTIES CPLUSPLUS ON) - SET(CMAKE_SWIG_FLAGS "") - - ### Add the SWIG interface file (which defines all the SWIG methods) - SWIG_ADD_MODULE(openshot python openshot.i) - - ### Link the new python wrapper library with libopenshot - SWIG_LINK_LIBRARIES(openshot ${PYTHON_LIBRARIES} openshot) - - ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) - EXECUTE_PROCESS ( COMMAND ${PYTHON_EXECUTABLE} -c "import site; print(site.getsitepackages()[0])" - OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE ) - GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) - FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) - SET(PYTHON_MODULE_PATH ${_REL_PYTHON_MODULE_PATH}) - - ############### INSTALL HEADERS & LIBRARY ################ - ### Install Python bindings - INSTALL(TARGETS _openshot DESTINATION ${PYTHON_MODULE_PATH} ) - INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/openshot.py DESTINATION ${PYTHON_MODULE_PATH} ) + ### Include Python header files + INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_PATH}) + INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}) + + ### Enable C++ support in SWIG + set_property(SOURCE openshot.i PROPERTY CPLUSPLUS ON) + set_property(SOURCE openshot.i PROPERTY SWIG_MODULE_NAME openshot) + + ### Suppress a ton of warnings in the generated SWIG C++ code + set(SWIG_CXX_FLAGS "-Wno-unused-variable -Wno-unused-function -Wno-deprecated-copy -Wno-class-memaccess -Wno-cast-function-type \ +-Wno-unused-parameter -Wno-catch-value -Wno-sign-compare -Wno-ignored-qualifiers") + separate_arguments(sw_flags UNIX_COMMAND ${SWIG_CXX_FLAGS}) + set_property(SOURCE openshot.i PROPERTY GENERATED_COMPILE_OPTIONS ${sw_flags}) + + ### Take include dirs from target, automatically if possible + if (CMAKE_VERSION VERSION_GREATER 3.13) + set_property(SOURCE openshot.i PROPERTY USE_TARGET_INCLUDE_DIRECTORIES True) + else () + set_property(SOURCE openshot.i PROPERTY INCLUDE_DIRECTORIES $) + endif () + + ### Add the SWIG interface file (which defines all the SWIG methods) + if (CMAKE_VERSION VERSION_LESS 3.8.0) + swig_add_module(pyopenshot python openshot.i) + else() + swig_add_library(pyopenshot LANGUAGE python SOURCES openshot.i) + endif() + + ### Set output name of target + set_target_properties(${SWIG_MODULE_pyopenshot_REAL_NAME} PROPERTIES + PREFIX "_" OUTPUT_NAME "openshot") + + ### Link the new python wrapper library with libopenshot + target_link_libraries(${SWIG_MODULE_pyopenshot_REAL_NAME} + PUBLIC ${PYTHON_LIBRARIES} openshot) + + ### Check if the following Debian-friendly python module path exists + SET(PYTHON_MODULE_PATH 
"${CMAKE_INSTALL_PREFIX}/lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/dist-packages") + if (NOT EXISTS ${PYTHON_MODULE_PATH}) + + ### Calculate the python module path (using distutils) + execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ +from distutils.sysconfig import get_python_lib; \ +print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )" + OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH + OUTPUT_STRIP_TRAILING_WHITESPACE ) + + GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH + "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) + FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH + ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) + SET(PYTHON_MODULE_PATH ${_ABS_PYTHON_MODULE_PATH}) + endif() + message("PYTHON_MODULE_PATH: ${PYTHON_MODULE_PATH}") + + ############### INSTALL HEADERS & LIBRARY ################ + ### Install Python bindings + INSTALL(TARGETS ${SWIG_MODULE_pyopenshot_REAL_NAME} + LIBRARY DESTINATION ${PYTHON_MODULE_PATH} ) + INSTALL(FILES ${CMAKE_CURRENT_BINARY_DIR}/openshot.py + DESTINATION ${PYTHON_MODULE_PATH} ) - ENDIF(PYTHONINTERP_FOUND) -ENDIF (PYTHONLIBS_FOUND) +endif () diff -Nru libopenshot-0.2.2+dfsg1/src/bindings/python/openshot.i libopenshot-0.2.5+dfsg1/src/bindings/python/openshot.i --- libopenshot-0.2.2+dfsg1/src/bindings/python/openshot.i 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/bindings/python/openshot.i 2020-03-03 08:00:06.000000000 +0000 @@ -4,10 +4,10 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of -# OpenShot Library (libopenshot), an open-source project dedicated to -# delivering high quality video editing and animation solutions to the +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the # world. For more information visit . 
# # OpenShot Library (libopenshot) is free software: you can redistribute it @@ -27,6 +27,12 @@ %module openshot +/* Suppress warnings about ignored operator= */ +%warnfilter(362); + +/* Don't generate multiple wrappers for functions with default args */ +%feature("compactdefaultargs", "1"); + /* Enable inline documentation */ %feature("autodoc", "1"); @@ -50,10 +56,9 @@ #endif %shared_ptr(juce::AudioSampleBuffer) %shared_ptr(openshot::Frame) -%shared_ptr(Frame) %{ -#include "../../../include/Version.h" +#include "OpenShotVersion.h" #include "../../../include/ReaderBase.h" #include "../../../include/WriterBase.h" #include "../../../include/CacheBase.h" @@ -80,12 +85,16 @@ #include "../../../include/PlayerBase.h" #include "../../../include/Point.h" #include "../../../include/Profiles.h" +#include "../../../include/QtHtmlReader.h" #include "../../../include/QtImageReader.h" #include "../../../include/QtPlayer.h" +#include "../../../include/QtTextReader.h" #include "../../../include/KeyFrame.h" #include "../../../include/RendererBase.h" +#include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" +#include "../../../include/AudioDeviceInfo.h" %} @@ -115,7 +124,48 @@ } } -%include "../../../include/Version.h" +/* Instantiate the required template specializations */ +%template() std::map; + +/* Make openshot.Fraction more Pythonic */ +%extend openshot::Fraction { +%{ + #include + #include +%} + double __float__() { + return $self->ToDouble(); + } + int __int__() { + return $self->ToInt(); + } + std::map GetMap() { + std::map map1; + map1.insert({"num", $self->num}); + map1.insert({"den", $self->den}); + return map1; + } + std::string __repr__() { + std::ostringstream result; + result << $self->num << ":" << $self->den; + return result.str(); + } +} + +%extend openshot::OpenShotVersion { + // Give the struct a string representation + const std::string __str__() { + return std::string(OPENSHOT_VERSION_FULL); + } + // And a repr for interactive use + const std::string __repr__() { + std::ostringstream result; + result << "OpenShotVersion('" << OPENSHOT_VERSION_FULL << "')"; + return result.str(); + } +} + +%include "OpenShotVersion.h" %include "../../../include/ReaderBase.h" %include "../../../include/WriterBase.h" %include "../../../include/CacheBase.h" @@ -146,12 +196,16 @@ %include "../../../include/PlayerBase.h" %include "../../../include/Point.h" %include "../../../include/Profiles.h" +%include "../../../include/QtHtmlReader.h" %include "../../../include/QtImageReader.h" %include "../../../include/QtPlayer.h" +%include "../../../include/QtTextReader.h" %include "../../../include/KeyFrame.h" %include "../../../include/RendererBase.h" +%include "../../../include/Settings.h" %include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" +%include "../../../include/AudioDeviceInfo.h" #ifdef USE_IMAGEMAGICK %include "../../../include/ImageReader.h" @@ -177,12 +231,11 @@ /* Wrap std templates (list, vector, etc...) 
*/ -namespace std { - %template(ClipList) list; - %template(EffectBaseList) list; - %template(CoordinateVector) vector; - %template(PointsVector) vector; - %template(FieldVector) vector; - %template(MappedFrameVector) vector; - %template(MappedMetadata) map; -} +%template(ClipList) std::list; +%template(EffectBaseList) std::list; +%template(CoordinateVector) std::vector; +%template(PointsVector) std::vector; +%template(FieldVector) std::vector; +%template(MappedFrameVector) std::vector; +%template(MappedMetadata) std::map; +%template(AudioDeviceInfoVector) std::vector; diff -Nru libopenshot-0.2.2+dfsg1/src/bindings/ruby/CMakeLists.txt libopenshot-0.2.5+dfsg1/src/bindings/ruby/CMakeLists.txt --- libopenshot-0.2.2+dfsg1/src/bindings/ruby/CMakeLists.txt 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/bindings/ruby/CMakeLists.txt 2020-03-03 08:00:06.000000000 +0000 @@ -4,10 +4,10 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of -# OpenShot Library (libopenshot), an open-source project dedicated to -# delivering high quality video editing and animation solutions to the +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the # world. For more information visit . # # OpenShot Library (libopenshot) is free software: you can redistribute it @@ -24,40 +24,68 @@ # along with OpenShot Library. If not, see . ################################################################################ - ############### RUBY BINDINGS ################ -FIND_PACKAGE(SWIG 2.0 REQUIRED) +FIND_PACKAGE(SWIG 3.0 REQUIRED) INCLUDE(${SWIG_USE_FILE}) +### Enable some legacy SWIG behaviors, in newer CMAKEs +if (POLICY CMP0078) + cmake_policy(SET CMP0078 OLD) +endif() +if (POLICY CMP0086) + cmake_policy(SET CMP0086 OLD) +endif() + FIND_PACKAGE(Ruby) IF (RUBY_FOUND) ### Include the Ruby header files INCLUDE_DIRECTORIES(${RUBY_INCLUDE_DIRS}) INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}) - + ### Enable C++ in SWIG - SET_SOURCE_FILES_PROPERTIES(openshot.i PROPERTIES CPLUSPLUS ON) - SET(CMAKE_SWIG_FLAGS "") + set_property(SOURCE openshot.i PROPERTY CPLUSPLUS ON) + set_property(SOURCE openshot.i PROPERTY SWIG_MODULE_NAME openshot) + + ### Suppress a ton of warnings in the generated SWIG C++ code + set(SWIG_CXX_FLAGS "-Wno-unused-variable -Wno-unused-function -Wno-deprecated-copy -Wno-class-memaccess -Wno-cast-function-type \ +-Wno-unused-parameter -Wno-catch-value -Wno-sign-compare -Wno-ignored-qualifiers") + separate_arguments(sw_flags UNIX_COMMAND ${SWIG_CXX_FLAGS}) + set_property(SOURCE openshot.i PROPERTY GENERATED_COMPILE_OPTIONS ${sw_flags}) + + ### Take include dirs from target, automatically if possible + if (CMAKE_VERSION VERSION_GREATER 3.13) + set_property(SOURCE openshot.i PROPERTY USE_TARGET_INCLUDE_DIRECTORIES True) + else () + set_property(SOURCE openshot.i PROPERTY INCLUDE_DIRECTORIES $) + endif () ### Add the SWIG interface file (which defines all the SWIG methods) - SWIG_ADD_MODULE(rbopenshot ruby openshot.i) - + if (CMAKE_VERSION VERSION_LESS 3.8.0) + swig_add_module(rbopenshot ruby openshot.i) + else() + swig_add_library(rbopenshot LANGUAGE ruby SOURCES openshot.i) + endif() + ### Set name of target (with no prefix, since Ruby does not like that) - SET_TARGET_PROPERTIES(rbopenshot PROPERTIES PREFIX "" OUTPUT_NAME "openshot") + SET_TARGET_PROPERTIES(${SWIG_MODULE_rbopenshot_REAL_NAME} PROPERTIES + PREFIX "" 
OUTPUT_NAME "openshot") ### Link the new Ruby wrapper library with libopenshot - SWIG_LINK_LIBRARIES(rbopenshot ${RUBY_LIBRARY} openshot) - + target_link_libraries(${SWIG_MODULE_rbopenshot_REAL_NAME} + ${RUBY_LIBRARY} openshot) + ### FIND THE RUBY INTERPRETER (AND THE LOAD_PATH FOLDER) - EXECUTE_PROCESS(COMMAND ${RUBY_EXECUTABLE} -r rbconfig -e "print RbConfig::CONFIG['vendorarchdir']" OUTPUT_VARIABLE RUBY_VENDOR_ARCH_DIR) + EXECUTE_PROCESS(COMMAND ${RUBY_EXECUTABLE} + -r rbconfig -e "print RbConfig::CONFIG['vendorarchdir']" + OUTPUT_VARIABLE RUBY_VENDOR_ARCH_DIR) MESSAGE(STATUS "Ruby executable: ${RUBY_EXECUTABLE}") MESSAGE(STATUS "Ruby vendor arch dir: ${RUBY_VENDOR_ARCH_DIR}") MESSAGE(STATUS "Ruby include path: ${RUBY_INCLUDE_PATH}") - ############### INSTALL HEADERS & LIBRARY ################ # Install Ruby bindings - INSTALL(TARGETS rbopenshot LIBRARY DESTINATION ${RUBY_VENDOR_ARCH_DIR}) - + install(TARGETS ${SWIG_MODULE_rbopenshot_REAL_NAME} + LIBRARY DESTINATION ${RUBY_VENDOR_ARCH_DIR} ) + ENDIF (RUBY_FOUND) diff -Nru libopenshot-0.2.2+dfsg1/src/bindings/ruby/openshot.i libopenshot-0.2.5+dfsg1/src/bindings/ruby/openshot.i --- libopenshot-0.2.2+dfsg1/src/bindings/ruby/openshot.i 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/bindings/ruby/openshot.i 2020-03-03 08:00:06.000000000 +0000 @@ -4,10 +4,10 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of -# OpenShot Library (libopenshot), an open-source project dedicated to -# delivering high quality video editing and animation solutions to the +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the # world. For more information visit . # # OpenShot Library (libopenshot) is free software: you can redistribute it @@ -27,6 +27,12 @@ %module openshot +/* Suppress warnings about ignored operator= */ +%warnfilter(362); + +/* Don't generate multiple wrappers for functions with default args */ +%feature("compactdefaultargs", "1"); + /* Enable inline documentation */ %feature("autodoc", "1"); @@ -53,11 +59,17 @@ #endif %template(SPtrAudioBuffer) std::shared_ptr; %template(SPtrOpenFrame) std::shared_ptr; -%template(SPtrFrame) std::shared_ptr; - %{ -#include "../../../include/Version.h" +/* Ruby and FFmpeg define competing RSHIFT macros, + * so we move Ruby's out of the way for now. 
We'll + * restore it after dealing with FFmpeg's + */ +#ifdef RSHIFT + #define RB_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif +#include "OpenShotVersion.h" #include "../../../include/ReaderBase.h" #include "../../../include/WriterBase.h" #include "../../../include/CacheBase.h" @@ -84,13 +96,26 @@ #include "../../../include/PlayerBase.h" #include "../../../include/Point.h" #include "../../../include/Profiles.h" +#include "../../../include/QtHtmlReader.h" #include "../../../include/QtImageReader.h" #include "../../../include/QtPlayer.h" +#include "../../../include/QtTextReader.h" #include "../../../include/KeyFrame.h" #include "../../../include/RendererBase.h" +#include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" +#include "../../../include/AudioDeviceInfo.h" +/* Move FFmpeg's RSHIFT to FF_RSHIFT, if present */ +#ifdef RSHIFT + #define FF_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif +/* And restore Ruby's RSHIFT, if we captured it */ +#ifdef RB_RSHIFT + #define RSHIFT(a, b) RB_RSHIFT(a, b) +#endif %} #ifdef USE_BLACKMAGIC @@ -108,7 +133,7 @@ %} #endif -%include "../../../include/Version.h" +%include "OpenShotVersion.h" %include "../../../include/ReaderBase.h" %include "../../../include/WriterBase.h" %include "../../../include/CacheBase.h" @@ -131,20 +156,45 @@ %include "../../../include/EffectInfo.h" %include "../../../include/Enums.h" %include "../../../include/Exceptions.h" + +/* Ruby and FFmpeg define competing RSHIFT macros, + * so we move Ruby's out of the way for now. We'll + * restore it after dealing with FFmpeg's + */ +#ifdef RSHIFT + #define RB_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif + %include "../../../include/FFmpegReader.h" %include "../../../include/FFmpegWriter.h" + +/* Move FFmpeg's RSHIFT to FF_RSHIFT, if present */ +#ifdef RSHIFT + #define FF_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif +/* And restore Ruby's RSHIFT, if we captured it */ +#ifdef RB_RSHIFT + #define RSHIFT(a, b) RB_RSHIFT(a, b) +#endif + %include "../../../include/Fraction.h" %include "../../../include/Frame.h" %include "../../../include/FrameMapper.h" %include "../../../include/PlayerBase.h" %include "../../../include/Point.h" %include "../../../include/Profiles.h" +%include "../../../include/QtHtmlReader.h" %include "../../../include/QtImageReader.h" %include "../../../include/QtPlayer.h" +%include "../../../include/QtTextReader.h" %include "../../../include/KeyFrame.h" %include "../../../include/RendererBase.h" +%include "../../../include/Settings.h" %include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" +%include "../../../include/AudioDeviceInfo.h" #ifdef USE_IMAGEMAGICK %include "../../../include/ImageReader.h" @@ -171,12 +221,11 @@ /* Wrap std templates (list, vector, etc...) 
*/ -namespace std { - %template(ClipList) list; - %template(EffectBaseList) list; - %template(CoordinateVector) vector; - %template(PointsVector) vector; - %template(FieldVector) vector; - %template(MappedFrameVector) vector; - %template(MappedMetadata) map; -} +%template(ClipList) std::list; +%template(EffectBaseList) std::list; +%template(CoordinateVector) std::vector; +%template(PointsVector) std::vector; +%template(FieldVector) std::vector; +%template(MappedFrameVector) std::vector; +%template(MappedMetadata) std::map; +%template(AudioDeviceInfoVector) std::vector; diff -Nru libopenshot-0.2.2+dfsg1/src/CacheBase.cpp libopenshot-0.2.5+dfsg1/src/CacheBase.cpp --- libopenshot-0.2.2+dfsg1/src/CacheBase.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/CacheBase.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for CacheBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -34,13 +37,13 @@ CacheBase::CacheBase() : max_bytes(0) { // Init the critical section cacheCriticalSection = new CriticalSection(); -}; +} // Constructor that sets the max frames to cache CacheBase::CacheBase(int64_t max_bytes) : max_bytes(max_bytes) { // Init the critical section cacheCriticalSection = new CriticalSection(); -}; +} // Set maximum bytes to a different amount based on a ReaderInfo struct void CacheBase::SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels) @@ -50,12 +53,12 @@ SetMaxBytes(bytes); } -// Generate Json::JsonValue for this object +// Generate Json::Value for this object Json::Value CacheBase::JsonValue() { // Create root json object Json::Value root; - stringstream max_bytes_stream; + std::stringstream max_bytes_stream; max_bytes_stream << max_bytes; root["max_bytes"] = max_bytes_stream.str(); @@ -63,10 +66,10 @@ return root; } -// Load Json::JsonValue into this object -void CacheBase::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void CacheBase::SetJsonValue(const Json::Value root) { // Set data from Json (if key is found) if (!root["max_bytes"].isNull()) - max_bytes = atoll(root["max_bytes"].asString().c_str()); -} \ No newline at end of file + max_bytes = std::stoll(root["max_bytes"].asString()); +} diff -Nru libopenshot-0.2.2+dfsg1/src/CacheDisk.cpp libopenshot-0.2.5+dfsg1/src/CacheDisk.cpp --- libopenshot-0.2.2+dfsg1/src/CacheDisk.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/CacheDisk.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for CacheDisk class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -31,7 +34,7 @@ using namespace openshot; // Default constructor, no max bytes -CacheDisk::CacheDisk(string cache_path, string format, float quality, float scale) : CacheBase(0) { +CacheDisk::CacheDisk(std::string cache_path, std::string format, float quality, float scale) : CacheBase(0) { // Set cache type name cache_type = "CacheDisk"; range_version = 0; @@ -44,10 +47,10 @@ // Init path directory InitPath(cache_path); -}; +} // Constructor that sets the max bytes to cache -CacheDisk::CacheDisk(string cache_path, string format, float quality, float scale, int64_t max_bytes) : CacheBase(max_bytes) { +CacheDisk::CacheDisk(std::string cache_path, std::string format, float quality, float scale, int64_t max_bytes) : CacheBase(max_bytes) { // Set cache type name cache_type = "CacheDisk"; range_version = 0; @@ -59,10 +62,10 @@ // Init path directory InitPath(cache_path); -}; +} // Initialize cache directory -void CacheDisk::InitPath(string cache_path) { +void CacheDisk::InitPath(std::string cache_path) { QString qpath; if (!cache_path.empty()) { @@ -100,25 +103,19 @@ // Increment range version range_version++; - vector::iterator itr_ordered; int64_t starting_frame = *ordered_frame_numbers.begin(); - int64_t ending_frame = *ordered_frame_numbers.begin(); + int64_t ending_frame = starting_frame; // Loop through all known frames (in sequential order) - for (itr_ordered = ordered_frame_numbers.begin(); itr_ordered != ordered_frame_numbers.end(); ++itr_ordered) { - int64_t frame_number = *itr_ordered; + for (const auto frame_number : ordered_frame_numbers) { if (frame_number - ending_frame > 1) { // End of range detected Json::Value range; // Add JSON object with start/end attributes // Use strings, since int64_ts are supported in JSON - stringstream start_str; - start_str << starting_frame; - stringstream end_str; - end_str << ending_frame; - range["start"] = start_str.str(); - range["end"] = end_str.str(); + range["start"] = std::to_string(starting_frame); + range["end"] = std::to_string(ending_frame); ranges.append(range); // Set new starting range @@ -134,12 +131,8 @@ // Add JSON object with start/end attributes // Use strings, since int64_ts are supported in JSON - stringstream start_str; - start_str << starting_frame; - stringstream end_str; - end_str << ending_frame; - range["start"] = start_str.str(); - range["end"] = end_str.str(); + range["start"] = std::to_string(starting_frame); + range["end"] = std::to_string(ending_frame); ranges.append(range); // Cache range JSON as string @@ -299,7 +292,7 @@ std::shared_ptr f; // Loop through frame numbers - deque::iterator itr; + std::deque::iterator itr; int64_t smallest_frame = -1; for(itr = frame_numbers.begin(); itr != frame_numbers.end(); ++itr) { @@ -322,7 +315,7 @@ int64_t total_bytes = 0; // Loop through frames, and calculate total bytes - deque::reverse_iterator itr; + std::deque::reverse_iterator itr; for(itr = frame_numbers.rbegin(); itr != frame_numbers.rend(); ++itr) total_bytes += frame_size_bytes; @@ -342,7 +335,7 @@ const GenericScopedLock lock(*cacheCriticalSection); // Loop through frame numbers - deque::iterator itr; + std::deque::iterator itr; for(itr = frame_numbers.begin(); itr != frame_numbers.end();) { //deque::iterator current = itr++; @@ -355,7 +348,7 @@ } // Loop through ordered frame numbers - vector::iterator itr_ordered; + std::vector::iterator itr_ordered; 
for(itr_ordered = ordered_frame_numbers.begin(); itr_ordered != ordered_frame_numbers.end();) { if (*itr_ordered >= start_frame_number && *itr_ordered <= end_frame_number) @@ -394,7 +387,7 @@ const GenericScopedLock lock(*cacheCriticalSection); // Loop through frame numbers - deque::iterator itr; + std::deque::iterator itr; for(itr = frame_numbers.begin(); itr != frame_numbers.end(); ++itr) { if (*itr == frame_number) @@ -462,13 +455,13 @@ } // Generate JSON string of this object -string CacheDisk::Json() { +std::string CacheDisk::Json() { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object +// Generate Json::Value for this object Json::Value CacheDisk::JsonValue() { // Process range data (if anything has changed) @@ -480,46 +473,40 @@ root["path"] = path.path().toStdString(); Json::Value version; - stringstream range_version_str; + std::stringstream range_version_str; range_version_str << range_version; root["version"] = range_version_str.str(); // Parse and append range data (if any) - Json::Value ranges; - Json::Reader reader; - bool success = reader.parse( json_ranges, ranges ); - if (success) + // Parse and append range data (if any) + try { + const Json::Value ranges = openshot::stringToJson(json_ranges); root["ranges"] = ranges; + } catch (...) { } // return JsonValue return root; } // Load JSON string into this object -void CacheDisk::SetJson(string value) { +void CacheDisk::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void CacheDisk::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void CacheDisk::SetJsonValue(const Json::Value root) { // Close timeline before we do anything (this also removes all open and closing clips) Clear(); diff -Nru libopenshot-0.2.2+dfsg1/src/CacheMemory.cpp libopenshot-0.2.5+dfsg1/src/CacheMemory.cpp --- libopenshot-0.2.2+dfsg1/src/CacheMemory.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/CacheMemory.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Cache class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
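/* Illustrative sketch of the JSON handling used throughout the cache hunks above: the
 * deprecated Json::Reader and atoll() give way to builder-based parsing and std::stoll().
 * Written directly against jsoncpp (the include path may differ per platform), the
 * pattern looks roughly like this.
 */
#include <json/json.h>
#include <cstdint>
#include <sstream>
#include <stdexcept>
#include <string>

int64_t parse_max_bytes(const std::string& json_text)
{
    Json::CharReaderBuilder rbuilder;
    Json::Value root;
    std::string errors;
    std::istringstream stream(json_text);

    if (!Json::parseFromStream(rbuilder, stream, &root, &errors))
        throw std::runtime_error("JSON could not be parsed: " + errors);

    // int64 values are stored as strings in this JSON (see the max_bytes/ranges hunks)
    return std::stoll(root["max_bytes"].asString());
}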
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -36,7 +39,7 @@ cache_type = "CacheMemory"; range_version = 0; needs_range_processing = false; -}; +} // Constructor that sets the max bytes to cache CacheMemory::CacheMemory(int64_t max_bytes) : CacheBase(max_bytes) { @@ -44,7 +47,7 @@ cache_type = "CacheMemory"; range_version = 0; needs_range_processing = false; -}; +} // Default destructor CacheMemory::~CacheMemory() @@ -76,7 +79,7 @@ // Increment range version range_version++; - vector::iterator itr_ordered; + std::vector::iterator itr_ordered; int64_t starting_frame = *ordered_frame_numbers.begin(); int64_t ending_frame = *ordered_frame_numbers.begin(); @@ -89,12 +92,8 @@ // Add JSON object with start/end attributes // Use strings, since int64_ts are supported in JSON - stringstream start_str; - start_str << starting_frame; - stringstream end_str; - end_str << ending_frame; - range["start"] = start_str.str(); - range["end"] = end_str.str(); + range["start"] = std::to_string(starting_frame); + range["end"] = std::to_string(ending_frame); ranges.append(range); // Set new starting range @@ -110,12 +109,8 @@ // Add JSON object with start/end attributes // Use strings, since int64_ts are not supported in JSON - stringstream start_str; - start_str << starting_frame; - stringstream end_str; - end_str << ending_frame; - range["start"] = start_str.str(); - range["end"] = end_str.str(); + range["start"] = std::to_string(starting_frame); + range["end"] = std::to_string(ending_frame); ranges.append(range); // Cache range JSON as string @@ -175,7 +170,7 @@ std::shared_ptr f; // Loop through frame numbers - deque::iterator itr; + std::deque::iterator itr; int64_t smallest_frame = -1; for(itr = frame_numbers.begin(); itr != frame_numbers.end(); ++itr) { @@ -198,7 +193,7 @@ int64_t total_bytes = 0; // Loop through frames, and calculate total bytes - deque::reverse_iterator itr; + std::deque::reverse_iterator itr; for(itr = frame_numbers.rbegin(); itr != frame_numbers.rend(); ++itr) { total_bytes += frames[*itr]->GetBytes(); @@ -220,7 +215,7 @@ const GenericScopedLock lock(*cacheCriticalSection); // Loop through frame numbers - deque::iterator itr; + std::deque::iterator itr; for(itr = frame_numbers.begin(); itr != frame_numbers.end();) { if (*itr >= start_frame_number && *itr <= end_frame_number) @@ -232,7 +227,7 @@ } // Loop through ordered frame numbers - vector::iterator itr_ordered; + std::vector::iterator itr_ordered; for(itr_ordered = ordered_frame_numbers.begin(); itr_ordered != ordered_frame_numbers.end();) { if (*itr_ordered >= start_frame_number && *itr_ordered <= end_frame_number) @@ -258,7 +253,7 @@ if (frames.count(frame_number)) { // Loop through frame numbers - deque::iterator itr; + std::deque::iterator itr; for(itr = frame_numbers.begin(); itr != frame_numbers.end(); ++itr) { if (*itr == frame_number) @@ -318,13 +313,13 @@ // Generate JSON string of this object -string CacheMemory::Json() { +std::string CacheMemory::Json() { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object +// Generate Json::Value for this object Json::Value CacheMemory::JsonValue() { // Process range data (if anything has changed) @@ -334,46 +329,37 @@ Json::Value root = CacheBase::JsonValue(); // get parent properties root["type"] = cache_type; - stringstream range_version_str; - range_version_str << range_version; - root["version"] = 
range_version_str.str(); + root["version"] = std::to_string(range_version); // Parse and append range data (if any) - Json::Value ranges; - Json::Reader reader; - bool success = reader.parse( json_ranges, ranges ); - if (success) + try { + const Json::Value ranges = openshot::stringToJson(json_ranges); root["ranges"] = ranges; + } catch (...) { } // return JsonValue return root; } // Load JSON string into this object -void CacheMemory::SetJson(string value) { - - // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); +void CacheMemory::SetJson(const std::string value) { try { + // Parse string to Json::Value + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void CacheMemory::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void CacheMemory::SetJsonValue(const Json::Value root) { // Close timeline before we do anything (this also removes all open and closing clips) Clear(); diff -Nru libopenshot-0.2.2+dfsg1/src/ChunkReader.cpp libopenshot-0.2.5+dfsg1/src/ChunkReader.cpp --- libopenshot-0.2.2+dfsg1/src/ChunkReader.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/ChunkReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for ChunkReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,10 +29,11 @@ */ #include "../include/ChunkReader.h" +#include "../include/FFmpegReader.h" using namespace openshot; -ChunkReader::ChunkReader(string path, ChunkVersion chunk_version) +ChunkReader::ChunkReader(std::string path, ChunkVersion chunk_version) : path(path), chunk_size(24 * 3), is_open(false), version(chunk_version), local_reader(NULL) { // Check if folder exists? @@ -41,13 +45,13 @@ previous_location.number = 0; previous_location.frame = 0; - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) 
Open(); Close(); } // Check if folder path existing -bool ChunkReader::does_folder_exist(string path) +bool ChunkReader::does_folder_exist(std::string path) { QDir dir(path.c_str()); return dir.exists(); @@ -57,12 +61,12 @@ void ChunkReader::load_json() { // Load path of chunk folder - string json_path = QDir::cleanPath(QString(path.c_str()) + QDir::separator() + "info.json").toStdString(); - stringstream json_string; + std::string json_path = QDir::cleanPath(QString(path.c_str()) + QDir::separator() + "info.json").toStdString(); + std::stringstream json_string; // Read the JSON file - ifstream myfile (json_path.c_str()); - string line = ""; + std::ifstream myfile (json_path.c_str()); + std::string line = ""; if (myfile.is_open()) { while (myfile.good()) @@ -75,8 +79,10 @@ // Parse JSON string into JSON objects Json::Value root; - Json::Reader reader; - bool success = reader.parse( json_string.str(), root ); + Json::CharReaderBuilder rbuilder; + + std::string errors; + bool success = Json::parseFromStream(rbuilder, json_string, &root, &errors); if (!success) // Raise exception throw InvalidJSON("Chunk folder could not be opened.", path); @@ -88,7 +94,7 @@ info.has_video = root["has_video"].asBool(); info.has_audio = root["has_audio"].asBool(); info.duration = root["duration"].asDouble(); - info.file_size = atoll(root["file_size"].asString().c_str()); + info.file_size = std::stoll(root["file_size"].asString()); info.height = root["height"].asInt(); info.width = root["width"].asInt(); info.pixel_format = root["pixel_format"].asInt(); @@ -100,7 +106,7 @@ info.display_ratio.num = root["display_ratio"]["num"].asInt(); info.display_ratio.den = root["display_ratio"]["den"].asInt(); info.vcodec = root["vcodec"].asString(); - info.video_length = atoll(root["video_length"].asString().c_str()); + info.video_length = std::stoll(root["video_length"].asString()); info.video_stream_index = root["video_stream_index"].asInt(); info.video_timebase.num = root["video_timebase"]["num"].asInt(); info.video_timebase.den = root["video_timebase"]["den"].asInt(); @@ -115,7 +121,7 @@ info.audio_timebase.den = root["audio_timebase"]["den"].asInt(); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) throw InvalidJSON("JSON could not be parsed (or is invalid).", path); @@ -164,10 +170,10 @@ } // get a formatted path of a specific chunk -string ChunkReader::get_chunk_path(int64_t chunk_number, string folder, string extension) +std::string ChunkReader::get_chunk_path(int64_t chunk_number, std::string folder, std::string extension) { // Create path of new chunk video - stringstream chunk_count_string; + std::stringstream chunk_count_string; chunk_count_string << chunk_number; QString padded_count = "%1"; //chunk_count_string.str().c_str(); padded_count = padded_count.arg(chunk_count_string.str().c_str(), 6, '0'); @@ -196,7 +202,7 @@ if (previous_location.number != location.number) { // Determine version of chunk - string folder_name = ""; + std::string folder_name = ""; switch (version) { case THUMBNAIL: @@ -211,12 +217,12 @@ } // Load path of chunk video - string chunk_video_path = get_chunk_path(location.number, folder_name, ".webm"); + std::string chunk_video_path = get_chunk_path(location.number, folder_name, ".webm"); // Close existing reader (if needed) if (local_reader) { - cout << "Close READER" << endl; + std::cout << "Close READER" << std::endl; // Close and delete old reader local_reader->Close(); delete local_reader; @@ -224,13 +230,12 @@ try { - cout << "Load 
READER: " << chunk_video_path << endl; + std::cout << "Load READER: " << chunk_video_path << std::endl; // Load new FFmpegReader local_reader = new FFmpegReader(chunk_video_path); - local_reader->enable_seek = false; // disable seeking local_reader->Open(); // open reader - } catch (InvalidFile) + } catch (const InvalidFile& e) { // Invalid Chunk (possibly it is not found) throw ChunkNotFound(path, requested_frame, location.number, location.frame); @@ -251,20 +256,20 @@ } // Generate JSON string of this object -string ChunkReader::Json() { +std::string ChunkReader::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value ChunkReader::JsonValue() { +// Generate Json::Value for this object +Json::Value ChunkReader::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties root["type"] = "ChunkReader"; root["path"] = path; - stringstream chunk_size_stream; + std::stringstream chunk_size_stream; chunk_size_stream << chunk_size; root["chunk_size"] = chunk_size_stream.str(); root["chunk_version"] = version; @@ -274,30 +279,23 @@ } // Load JSON string into this object -void ChunkReader::SetJson(string value) { - - // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); +void ChunkReader::SetJson(const std::string value) { try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void ChunkReader::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void ChunkReader::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); @@ -306,7 +304,7 @@ if (!root["path"].isNull()) path = root["path"].asString(); if (!root["chunk_size"].isNull()) - chunk_size = atoll(root["chunk_size"].asString().c_str()); + chunk_size = std::stoll(root["chunk_size"].asString()); if (!root["chunk_version"].isNull()) version = (ChunkVersion) root["chunk_version"].asInt(); diff -Nru libopenshot-0.2.2+dfsg1/src/ChunkWriter.cpp libopenshot-0.2.5+dfsg1/src/ChunkWriter.cpp --- libopenshot-0.2.2+dfsg1/src/ChunkWriter.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/ChunkWriter.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for ChunkWriter class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
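
[Editor's note] ChunkReader::load_json() above shows the stream-based variant of the same migration: Json::parseFromStream() with a Json::CharReaderBuilder replaces the deprecated Json::Reader, and std::stoll() replaces atoll() so that malformed numbers raise an exception instead of silently returning 0. A minimal sketch of that pattern, with an illustrative file-reading wrapper that is not part of the patch:

    #include <json/json.h>
    #include <fstream>
    #include <stdexcept>
    #include <string>

    // Sketch of the CharReaderBuilder / parseFromStream pattern used in load_json().
    // The error text and function name are illustrative only.
    Json::Value read_info_json(const std::string& json_path)
    {
        std::ifstream file(json_path);
        Json::CharReaderBuilder rbuilder;

        Json::Value root;
        std::string errors;
        if (!Json::parseFromStream(rbuilder, file, &root, &errors))
            throw std::runtime_error("Could not parse " + json_path + ": " + errors);
        return root;
    }

std::stoll(root["file_size"].asString()) then converts the stringified 64-bit values, throwing std::invalid_argument or std::out_of_range on bad input, which the surrounding catch (const std::exception&) block already handles.
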
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -29,7 +32,7 @@ using namespace openshot; -ChunkWriter::ChunkWriter(string path, ReaderBase *reader) : +ChunkWriter::ChunkWriter(std::string path, ReaderBase *reader) : local_reader(reader), path(path), chunk_size(24*3), chunk_count(1), frame_count(1), is_writing(false), default_extension(".webm"), default_vcodec("libvpx"), default_acodec("libvorbis"), last_frame_needed(false), is_open(false) { @@ -51,10 +54,10 @@ } // get a formatted path of a specific chunk -string ChunkWriter::get_chunk_path(int64_t chunk_number, string folder, string extension) +std::string ChunkWriter::get_chunk_path(int64_t chunk_number, std::string folder, std::string extension) { // Create path of new chunk video - stringstream chunk_count_string; + std::stringstream chunk_count_string; chunk_count_string << chunk_number; QString padded_count = "%1"; //chunk_count_string.str().c_str(); padded_count = padded_count.arg(chunk_count_string.str().c_str(), 6, '0'); @@ -154,9 +157,9 @@ // Write the frames once it reaches the correct chunk size if (frame_count % chunk_size == 0 && frame_count >= chunk_size) { - cout << "Done with chunk" << endl; - cout << "frame_count: " << frame_count << endl; - cout << "chunk_size: " << chunk_size << endl; + std::cout << "Done with chunk" << std::endl; + std::cout << "frame_count: " << frame_count << std::endl; + std::cout << "chunk_size: " << chunk_size << std::endl; // Pad an additional 12 frames for (int z = 0; z<12; z++) @@ -226,9 +229,9 @@ // Write the frames once it reaches the correct chunk size if (is_writing) { - cout << "Final chunk" << endl; - cout << "frame_count: " << frame_count << endl; - cout << "chunk_size: " << chunk_size << endl; + std::cout << "Final chunk" << std::endl; + std::cout << "frame_count: " << frame_count << std::endl; + std::cout << "chunk_size: " << chunk_size << std::endl; // Pad an additional 12 frames for (int z = 0; z<12; z++) @@ -271,17 +274,17 @@ void ChunkWriter::write_json_meta_data() { // Load path of chunk folder - string json_path = QDir::cleanPath(QString(path.c_str()) + QDir::separator() + "info.json").toStdString(); + std::string json_path = QDir::cleanPath(QString(path.c_str()) + QDir::separator() + "info.json").toStdString(); // Write JSON file - ofstream myfile; + std::ofstream myfile; myfile.open (json_path.c_str()); - myfile << local_reader->Json() << endl; + myfile << local_reader->Json() << std::endl; myfile.close(); } // check for chunk folder -void ChunkWriter::create_folder(string path) +void ChunkWriter::create_folder(std::string path) { QDir dir(path.c_str()); if (!dir.exists()) { @@ -300,5 +303,3 @@ { is_open = true; } - - diff -Nru libopenshot-0.2.2+dfsg1/src/ClipBase.cpp libopenshot-0.2.5+dfsg1/src/ClipBase.cpp --- libopenshot-0.2.2+dfsg1/src/ClipBase.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/ClipBase.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for EffectBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -29,8 +32,8 @@ using namespace openshot; -// Generate Json::JsonValue for this object -Json::Value ClipBase::JsonValue() { +// Generate Json::Value for this object +Json::Value ClipBase::JsonValue() const { // Create root json object Json::Value root; @@ -45,8 +48,8 @@ return root; } -// Load Json::JsonValue into this object -void ClipBase::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void ClipBase::SetJsonValue(const Json::Value root) { // Set data from Json (if key is found) if (!root["id"].isNull()) @@ -62,10 +65,10 @@ } // Generate JSON for a property -Json::Value ClipBase::add_property_json(string name, float value, string type, string memo, Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame) { +Json::Value ClipBase::add_property_json(std::string name, float value, std::string type, std::string memo, const Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame) const { // Requested Point - Point requested_point(requested_frame, requested_frame); + const Point requested_point(requested_frame, requested_frame); // Create JSON Object Json::Value prop = Json::Value(Json::objectValue); @@ -98,7 +101,7 @@ return prop; } -Json::Value ClipBase::add_property_choice_json(string name, int value, int selected_value) { +Json::Value ClipBase::add_property_choice_json(std::string name, int value, int selected_value) const { // Create choice Json::Value new_choice = Json::Value(Json::objectValue); @@ -108,4 +111,4 @@ // return JsonValue return new_choice; -} \ No newline at end of file +} diff -Nru libopenshot-0.2.2+dfsg1/src/Clip.cpp libopenshot-0.2.5+dfsg1/src/Clip.cpp --- libopenshot-0.2.2+dfsg1/src/Clip.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Clip.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Clip class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,6 +29,15 @@ */ #include "../include/Clip.h" +#include "../include/FFmpegReader.h" +#include "../include/FrameMapper.h" +#ifdef USE_IMAGEMAGICK + #include "../include/ImageReader.h" + #include "../include/TextReader.h" +#endif +#include "../include/QtImageReader.h" +#include "../include/ChunkReader.h" +#include "../include/DummyReader.h" using namespace openshot; @@ -67,9 +79,9 @@ wave_color = Color((unsigned char)0, (unsigned char)123, (unsigned char)255, (unsigned char)255); // Init crop settings - crop_gravity = GRAVITY_CENTER; - crop_width = Keyframe(-1.0); - crop_height = Keyframe(-1.0); + crop_gravity = GRAVITY_TOP_LEFT; + crop_width = Keyframe(1.0); + crop_height = Keyframe(1.0); crop_x = Keyframe(0.0); crop_y = Keyframe(0.0); @@ -92,18 +104,15 @@ // Init audio and video overrides has_audio = Keyframe(-1.0); has_video = Keyframe(-1.0); - - // Default pointers - manage_reader = false; } // Init reader's rotation (if any) void Clip::init_reader_rotation() { // Only init rotation from reader when needed - if (rotation.Points.size() > 1) + if (rotation.GetCount() > 1) // Do nothing if more than 1 rotation Point return; - else if (rotation.Points.size() == 1 && rotation.GetValue(1) != 0.0) + else if (rotation.GetCount() == 1 && rotation.GetValue(1) != 0.0) // Do nothing if 1 Point, and it's not the default value return; @@ -114,7 +123,7 @@ try { float rotate_metadata = strtof(reader->info.metadata["rotate"].c_str(), 0); rotation = Keyframe(rotate_metadata); - } catch (exception e) {} + } catch (const std::exception& e) {} } else // Default no rotation @@ -122,14 +131,14 @@ } // Default Constructor for a clip -Clip::Clip() : reader(NULL), resampler(NULL), audio_cache(NULL) +Clip::Clip() : resampler(NULL), audio_cache(NULL), reader(NULL), allocated_reader(NULL) { // Init all default settings init_settings(); } // Constructor with reader -Clip::Clip(ReaderBase* new_reader) : reader(new_reader), resampler(NULL), audio_cache(NULL) +Clip::Clip(ReaderBase* new_reader) : resampler(NULL), audio_cache(NULL), reader(new_reader), allocated_reader(NULL) { // Init all default settings init_settings(); @@ -143,13 +152,13 @@ } // Constructor with filepath -Clip::Clip(string path) : reader(NULL), resampler(NULL), audio_cache(NULL) +Clip::Clip(std::string path) : resampler(NULL), audio_cache(NULL), reader(NULL), allocated_reader(NULL) { // Init all default settings init_settings(); // Get file extension (and convert to lower case) - string ext = get_file_extension(path); + std::string ext = get_file_extension(path); transform(ext.begin(), ext.end(), ext.begin(), ::tolower); // Determine if common video formats @@ -185,7 +194,7 @@ // Update duration if (reader) { End(reader->info.duration); - manage_reader = true; + allocated_reader = reader; init_reader_rotation(); } } @@ -194,9 +203,9 @@ Clip::~Clip() { // Delete the reader if clip created it - if (manage_reader && reader) { - delete reader; - reader = NULL; + if (allocated_reader) { + delete allocated_reader; + allocated_reader = NULL; } // Close the resampler @@ -212,6 +221,9 @@ // set reader pointer reader = new_reader; + // set parent + reader->SetClip(this); + // Init rotation (if any) init_reader_rotation(); } @@ -223,7 +235,7 @@ return reader; else // Throw error if reader not initialized - throw ReaderClosed("No Reader has been initialized for this Clip. 
Call Reader(*reader) before calling this method.", ""); + throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method."); } // Open the internal reader @@ -240,28 +252,28 @@ } else // Throw error if reader not initialized - throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", ""); + throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method."); } // Close the internal reader void Clip::Close() { if (reader) { - ZmqLogger::Instance()->AppendDebugMethod("Clip::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Clip::Close"); // Close the reader reader->Close(); } else // Throw error if reader not initialized - throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", ""); + throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method."); } // Get end position of clip (trim end of video), which can be affected by the time curve. -float Clip::End() +float Clip::End() const { - // if a time curve is present, use it's length - if (time.Points.size() > 1) + // if a time curve is present, use its length + if (time.GetCount() > 1) { // Determine the FPS fo this clip float fps = 24.0; @@ -270,7 +282,7 @@ fps = reader->info.fps.ToFloat(); else // Throw error if reader not initialized - throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", ""); + throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method."); return float(time.GetLength()) / fps; } @@ -302,8 +314,8 @@ // Is a time map detected int64_t new_frame_number = requested_frame; int64_t time_mapped_number = adjust_frame_number_minimum(time.GetLong(requested_frame)); - if (time.Values.size() > 1) - new_frame_number = time_mapped_number; + if (time.GetLength() > 1) + new_frame_number = time_mapped_number; // Now that we have re-mapped what frame number is needed, go and get the frame pointer std::shared_ptr original_frame; @@ -328,21 +340,21 @@ frame->AddAudio(true, channel, 0, original_frame->GetAudioSamples(channel), original_frame->GetAudioSamplesCount(), 1.0); // Get time mapped frame number (used to increase speed, change direction, etc...) - std::shared_ptr new_frame = get_time_mapped_frame(frame, requested_frame); + get_time_mapped_frame(frame, requested_frame); // Apply effects to the frame (if any) - apply_effects(new_frame); + apply_effects(frame); // Return processed 'frame' - return new_frame; + return frame; } else // Throw error if reader not initialized - throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", ""); + throw ReaderClosed("No Reader has been initialized for this Clip. 
Call Reader(*reader) before calling this method."); } // Get file extension -string Clip::get_file_extension(string path) +std::string Clip::get_file_extension(std::string path) { // return last part of path return path.substr(path.find_last_of(".") + 1); @@ -355,7 +367,7 @@ int channels = buffer->getNumChannels(); // Reverse array (create new buffer to hold the reversed version) - AudioSampleBuffer *reversed = new juce::AudioSampleBuffer(channels, number_of_samples); + juce::AudioSampleBuffer *reversed = new juce::AudioSampleBuffer(channels, number_of_samples); reversed->clear(); for (int channel = 0; channel < channels; channel++) @@ -377,18 +389,17 @@ } // Adjust the audio and image of a time mapped frame -std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number) +void Clip::get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number) { // Check for valid reader if (!reader) // Throw error if reader not initialized - throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.", ""); + throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method."); // Check for a valid time map curve - if (time.Values.size() > 1) + if (time.GetLength() > 1) { - const GenericScopedLock lock(getFrameCriticalSection); - std::shared_ptr new_frame; + const GenericScopedLock lock(getFrameCriticalSection); // create buffer and resampler juce::AudioSampleBuffer *samples = NULL; @@ -396,14 +407,7 @@ resampler = new AudioResampler(); // Get new frame number - int new_frame_number = adjust_frame_number_minimum(round(time.GetValue(frame_number))); - - // Create a new frame - int samples_in_frame = Frame::GetSamplesPerFrame(new_frame_number, reader->info.fps, reader->info.sample_rate, frame->GetAudioChannelsCount()); - new_frame = std::make_shared(new_frame_number, 1, 1, "#000000", samples_in_frame, frame->GetAudioChannelsCount()); - - // Copy the image from the new frame - new_frame->AddImage(std::shared_ptr(new QImage(*GetOrCreateFrame(new_frame_number)->GetImage()))); + int new_frame_number = frame->number; // Get delta (difference in previous Y value) int delta = int(round(time.GetDelta(frame_number))); @@ -419,7 +423,7 @@ if (time.GetRepeatFraction(frame_number).den > 1) { // SLOWING DOWN AUDIO // Resample data, and return new buffer pointer - AudioSampleBuffer *resampled_buffer = NULL; + juce::AudioSampleBuffer *resampled_buffer = NULL; int resampled_buffer_size = 0; // SLOW DOWN audio (split audio) @@ -451,7 +455,7 @@ start -= 1; for (int channel = 0; channel < channels; channel++) // Add new (slower) samples, to the frame object - new_frame->AddAudio(true, channel, 0, resampled_buffer->getReadPointer(channel, start), + frame->AddAudio(true, channel, 0, resampled_buffer->getReadPointer(channel, start), number_of_samples, 1.0f); // Clean up @@ -478,7 +482,7 @@ delta_frame <= new_frame_number; delta_frame++) { // buffer to hold detal samples int number_of_delta_samples = GetOrCreateFrame(delta_frame)->GetAudioSamplesCount(); - AudioSampleBuffer *delta_samples = new juce::AudioSampleBuffer(channels, + juce::AudioSampleBuffer *delta_samples = new juce::AudioSampleBuffer(channels, number_of_delta_samples); delta_samples->clear(); @@ -522,7 +526,7 @@ delta_frame >= new_frame_number; delta_frame--) { // buffer to hold delta samples int number_of_delta_samples = GetOrCreateFrame(delta_frame)->GetAudioSamplesCount(); - AudioSampleBuffer *delta_samples = new 
juce::AudioSampleBuffer(channels, + juce::AudioSampleBuffer *delta_samples = new juce::AudioSampleBuffer(channels, number_of_delta_samples); delta_samples->clear(); @@ -553,13 +557,13 @@ resampler->SetBuffer(samples, float(start) / float(number_of_samples)); // Resample data, and return new buffer pointer - AudioSampleBuffer *buffer = resampler->GetResampledBuffer(); + juce::AudioSampleBuffer *buffer = resampler->GetResampledBuffer(); int resampled_buffer_size = buffer->getNumSamples(); // Add the newly resized audio samples to the current frame for (int channel = 0; channel < channels; channel++) // Add new (slower) samples, to the frame object - new_frame->AddAudio(true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f); + frame->AddAudio(true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f); // Clean up buffer = NULL; @@ -580,7 +584,7 @@ // Add reversed samples to the frame object for (int channel = 0; channel < channels; channel++) - new_frame->AddAudio(true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f); + frame->AddAudio(true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f); } @@ -588,13 +592,7 @@ delete samples; samples = NULL; } - - // Return new time mapped frame - return new_frame; - - } else - // Use original frame - return frame; + } } // Adjust frame number minimum value @@ -618,36 +616,7 @@ try { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - - // Determine the max size of this clips source image (based on the timeline's size, the scaling mode, - // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, - // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline - // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in - // the future. 
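
[Editor's note] One structural change in Clip.cpp worth calling out: the old manage_reader boolean is replaced by an allocated_reader pointer (see the constructors and ~Clip() above), so the clip records exactly which reader it allocated internally and deletes only that one; readers handed in by the caller are never freed. Below is a self-contained sketch of the idea only, using placeholder types rather than the real openshot::Clip / openshot::ReaderBase classes.

    #include <string>

    // Illustrative stand-in; the real base class is openshot::ReaderBase.
    struct Reader { virtual ~Reader() = default; };

    class ClipSketch
    {
    public:
        // Caller-owned reader: remember it, but never delete it.
        explicit ClipSketch(Reader* external) : reader(external) {}

        // Internally-created reader (e.g. built from a file path): track it for cleanup.
        explicit ClipSketch(const std::string& /*path*/)
        {
            reader = allocated_reader = new Reader();
        }

        ~ClipSketch()
        {
            delete allocated_reader;      // no-op when the reader was caller-owned
            allocated_reader = nullptr;
        }

    private:
        Reader* reader = nullptr;           // reader currently in use
        Reader* allocated_reader = nullptr; // set only when this object did the allocation
    };

The same bookkeeping appears again in Clip::SetJsonValue() below, where a reader rebuilt from JSON is stored in allocated_reader and linked back to the clip with reader->SetClip(this).
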
- if (scale == SCALE_FIT || scale == SCALE_STRETCH) { - // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) - float max_scale_x = scale_x.GetMaxPoint().co.Y; - float max_scale_y = scale_y.GetMaxPoint().co.Y; - reader->SetMaxSize(max(float(max_width), max_width * max_scale_x), max(float(max_height), max_height * max_scale_y)); - - } else if (scale == SCALE_CROP) { - // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) - float max_scale_x = scale_x.GetMaxPoint().co.Y; - float max_scale_y = scale_y.GetMaxPoint().co.Y; - QSize width_size(max_width * max_scale_x, round(max_width / (float(reader->info.width) / float(reader->info.height)))); - QSize height_size(round(max_height / (float(reader->info.height) / float(reader->info.width))), max_height * max_scale_y); - - // respect aspect ratio - if (width_size.width() >= max_width && width_size.height() >= max_height) - reader->SetMaxSize(max(max_width, width_size.width()), max(max_height, width_size.height())); - else - reader->SetMaxSize(max(max_width, height_size.width()), max(max_height, height_size.height())); - - } else { - // No scaling, use original image size (slower) - reader->SetMaxSize(0, 0); - } + ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame); // Attempt to get a frame (but this could fail if a reader has just been closed) new_frame = reader->GetFrame(number); @@ -665,7 +634,7 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame); // Create blank frame new_frame = std::make_shared(number, reader->info.width, reader->info.height, "#000000", samples_in_frame, reader->info.channels); @@ -676,14 +645,14 @@ } // Generate JSON string of this object -string Clip::Json() { +std::string Clip::Json() const { // Return formatted string return JsonValue().toStyledString(); } // Get all properties for a specific frame -string Clip::PropertiesJSON(int64_t requested_frame) { +std::string Clip::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -747,6 +716,19 @@ root["has_audio"] = add_property_json("Enable Audio", has_audio.GetValue(requested_frame), "int", "", &has_audio, -1, 1.0, false, requested_frame); root["has_video"] = add_property_json("Enable Video", has_video.GetValue(requested_frame), "int", "", &has_video, -1, 1.0, false, requested_frame); + // Add enable audio/video choices (dropdown style) + root["has_audio"]["choices"].append(add_property_choice_json("Auto", -1, has_audio.GetValue(requested_frame))); + root["has_audio"]["choices"].append(add_property_choice_json("Off", 0, has_audio.GetValue(requested_frame))); + root["has_audio"]["choices"].append(add_property_choice_json("On", 1, has_audio.GetValue(requested_frame))); + root["has_video"]["choices"].append(add_property_choice_json("Auto", -1, has_video.GetValue(requested_frame))); + root["has_video"]["choices"].append(add_property_choice_json("Off", 0, has_video.GetValue(requested_frame))); + root["has_video"]["choices"].append(add_property_choice_json("On", 1, has_video.GetValue(requested_frame))); + + root["crop_x"] = add_property_json("Crop X", crop_x.GetValue(requested_frame), "float", "", &crop_x, -1.0, 
1.0, false, requested_frame); + root["crop_y"] = add_property_json("Crop Y", crop_y.GetValue(requested_frame), "float", "", &crop_y, -1.0, 1.0, false, requested_frame); + root["crop_width"] = add_property_json("Crop Width", crop_width.GetValue(requested_frame), "float", "", &crop_width, 0.0, 1.0, false, requested_frame); + root["crop_height"] = add_property_json("Crop Height", crop_height.GetValue(requested_frame), "float", "", &crop_height, 0.0, 1.0, false, requested_frame); + root["wave_color"] = add_property_json("Wave Color", 0.0, "color", "", &wave_color.red, 0, 255, false, requested_frame); root["wave_color"]["red"] = add_property_json("Red", wave_color.red.GetValue(requested_frame), "float", "", &wave_color.red, 0, 255, false, requested_frame); root["wave_color"]["blue"] = add_property_json("Blue", wave_color.blue.GetValue(requested_frame), "float", "", &wave_color.blue, 0, 255, false, requested_frame); @@ -757,8 +739,8 @@ return root.toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Clip::JsonValue() { +// Generate Json::Value for this object +Json::Value Clip::JsonValue() const { // Create root json object Json::Value root = ClipBase::JsonValue(); // get parent properties @@ -800,11 +782,8 @@ root["effects"] = Json::Value(Json::arrayValue); // loop through effects - list::iterator effect_itr; - for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr) + for (auto existing_effect : effects) { - // Get clip object from the iterator - EffectBase *existing_effect = (*effect_itr); root["effects"].append(existing_effect->JsonValue()); } @@ -816,30 +795,24 @@ } // Load JSON string into this object -void Clip::SetJson(string value) { +void Clip::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Clip::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Clip::SetJsonValue(const Json::Value root) { // Set parent data ClipBase::SetJsonValue(root); @@ -917,16 +890,13 @@ effects.clear(); // loop through effects - for (int x = 0; x < root["effects"].size(); x++) { - // Get each effect - Json::Value existing_effect = root["effects"][x]; - + for (const auto existing_effect : root["effects"]) { // Create Effect EffectBase *e = NULL; if (!existing_effect["type"].isNull()) { // Create instance of effect - if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { + if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) { // Load Json into Effect e->SetJsonValue(existing_effect); @@ -955,7 +925,7 @@ } // Create new reader (and load properties) - string type = root["reader"]["type"].asString(); + std::string type = root["reader"]["type"].asString(); if (type == "FFmpegReader") { @@ -996,9 +966,11 @@ reader->SetJsonValue(root["reader"]); } - // mark as managed reader - if (reader) - manage_reader = true; + // mark as managed reader and set parent + if (reader) { + 
reader->SetClip(this); + allocated_reader = reader; + } // Re-Open reader (if needed) if (already_open) @@ -1035,12 +1007,8 @@ std::shared_ptr Clip::apply_effects(std::shared_ptr frame) { // Find Effects at this position and layer - list::iterator effect_itr; - for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr) + for (auto effect : effects) { - // Get clip object from the iterator - EffectBase *effect = (*effect_itr); - // Apply the effect to this frame frame = effect->GetFrame(frame, frame->number); diff -Nru libopenshot-0.2.2+dfsg1/src/CMakeLists.txt libopenshot-0.2.5+dfsg1/src/CMakeLists.txt --- libopenshot-0.2.2+dfsg1/src/CMakeLists.txt 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/CMakeLists.txt 2020-03-03 08:00:06.000000000 +0000 @@ -4,10 +4,10 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of -# OpenShot Library (libopenshot), an open-source project dedicated to -# delivering high quality video editing and animation solutions to the +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the # world. For more information visit . # # OpenShot Library (libopenshot) is free software: you can redistribute it @@ -24,9 +24,9 @@ # along with OpenShot Library. If not, see . ################################################################################ -################ OPTIONS ################## -# Optional build settings for libopenshot -OPTION(USE_SYSTEM_JSONCPP "Use system installed JsonCpp" OFF) +# Collect and display summary of options/dependencies +include(FeatureSummary) + ################ WINDOWS ################## # Set some compiler options for Windows @@ -75,97 +75,20 @@ ENDIF (ImageMagick_FOUND) -################### FFMPEG ##################### -# Find FFmpeg libraries (used for video encoding / decoding) -FIND_PACKAGE(FFmpeg REQUIRED) - -# Include FFmpeg headers (needed for compile) -message('AVCODEC_FOUND: ${AVCODEC_FOUND}') -message('AVCODEC_INCLUDE_DIRS: ${AVCODEC_INCLUDE_DIRS}') -message('AVCODEC_LIBRARIES: ${AVCODEC_LIBRARIES}') - -IF (AVCODEC_FOUND) - include_directories(${AVCODEC_INCLUDE_DIRS}) -ENDIF (AVCODEC_FOUND) -IF (AVDEVICE_FOUND) - include_directories(${AVDEVICE_INCLUDE_DIRS}) -ENDIF (AVDEVICE_FOUND) -IF (AVFORMAT_FOUND) - include_directories(${AVFORMAT_INCLUDE_DIRS}) -ENDIF (AVFORMAT_FOUND) -IF (AVFILTER_FOUND) - include_directories(${AVFILTER_INCLUDE_DIRS}) -ENDIF (AVFILTER_FOUND) -IF (AVUTIL_FOUND) - include_directories(${AVUTIL_INCLUDE_DIRS}) -ENDIF (AVUTIL_FOUND) -IF (POSTPROC_FOUND) - include_directories(${POSTPROC_INCLUDE_DIRS}) -ENDIF (POSTPROC_FOUND) -IF (SWSCALE_FOUND) - include_directories(${SWSCALE_INCLUDE_DIRS}) -ENDIF (SWSCALE_FOUND) -IF (SWRESAMPLE_FOUND) - include_directories(${SWRESAMPLE_INCLUDE_DIRS}) -ENDIF (SWRESAMPLE_FOUND) -IF (AVRESAMPLE_FOUND) - include_directories(${AVRESAMPLE_INCLUDE_DIRS}) -ENDIF (AVRESAMPLE_FOUND) - ################# LIBOPENSHOT-AUDIO ################### # Find JUCE-based openshot Audio libraries -FIND_PACKAGE(OpenShotAudio REQUIRED) - -message('LIBOPENSHOT_AUDIO_INCLUDE_DIRS: ${LIBOPENSHOT_AUDIO_INCLUDE_DIRS}') +FIND_PACKAGE(OpenShotAudio 0.2.0 REQUIRED) # Include Juce headers (needed for compile) include_directories(${LIBOPENSHOT_AUDIO_INCLUDE_DIRS}) -################# QT5 ################### -# Find QT5 libraries -find_package(Qt5Widgets REQUIRED) -find_package(Qt5Core REQUIRED) 
-find_package(Qt5Gui REQUIRED) -find_package(Qt5Multimedia REQUIRED) -find_package(Qt5MultimediaWidgets REQUIRED) - -# Include Qt headers (needed for compile) -include_directories(${Qt5Widgets_INCLUDE_DIRS}) -include_directories(${Qt5Core_INCLUDE_DIRS}) -include_directories(${Qt5Gui_INCLUDE_DIRS}) -include_directories(${Qt5Multimedia_INCLUDE_DIRS}) -include_directories(${Qt5MultimediaWidgets_INCLUDE_DIRS}) - -add_definitions(${Qt5Widgets_DEFINITIONS}) -add_definitions(${Qt5Core_DEFINITIONS}) -add_definitions(${Qt5Gui_DEFINITIONS}) -add_definitions(${Qt5Multimedia_DEFINITIONS}) -add_definitions(${Qt5MultimediaWidgets_DEFINITIONS}) - -SET(QT_LIBRARIES ${Qt5Widgets_LIBRARIES} - ${Qt5Core_LIBRARIES} - ${Qt5Gui_LIBRARIES} - ${Qt5Multimedia_LIBRARIES} - ${Qt5MultimediaWidgets_LIBRARIES}) - -# Set compiler flags for Qt -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Widgets_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Core_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Gui_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Multimedia_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5MultimediaWidgets_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -ggdb ") - -# Manually moc Qt files -qt5_wrap_cpp(MOC_FILES ${QT_HEADER_FILES}) - ################# BLACKMAGIC DECKLINK ################### # Find BlackMagic DeckLinkAPI libraries IF (ENABLE_BLACKMAGIC) FIND_PACKAGE(BlackMagic) IF (BLACKMAGIC_FOUND) - # Include headers (needed for compile) + # Include Blackmagic headers (needed for compile) include_directories(${BLACKMAGIC_INCLUDE_DIR}) # define a global var (used in the C++) @@ -175,192 +98,309 @@ ENDIF (BLACKMAGIC_FOUND) ENDIF (ENABLE_BLACKMAGIC) -################### OPENMP ##################### -# Check for OpenMP (used for multi-core processing) -FIND_PACKAGE(OpenMP) +############### PROFILING ################# +#set(PROFILER "/usr/lib/libprofiler.so.0.3.2") +#set(PROFILER "/usr/lib/libtcmalloc.so.4") -if (OPENMP_FOUND) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS} ") -endif(OPENMP_FOUND) +if(CMAKE_VERSION VERSION_LESS 3.3) + # IWYU wasn't supported internally in 3.2 + set(ENABLE_IWYU FALSE) +endif() + +if(ENABLE_IWYU) + find_program(IWYU_PATH NAMES "iwyu" + DOC "include-what-you-use source code scanner executable") + if(IWYU_PATH) + if(IWYU_OPTS) + separate_arguments(IWYU_OPTS) + list(APPEND _iwyu_opts "-Xiwyu" ${IWYU_OPTS}) + endif() + set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE ${IWYU_PATH} ${_iwyu_opts}) + else() + set(ENABLE_IWYU FALSE) + endif() +endif() +add_feature_info("IWYU (include-what-you-use)" ENABLE_IWYU "Scan all source files with 'iwyu'") + +# Main library sources +set(OPENSHOT_SOURCES + AudioBufferSource.cpp + AudioReaderSource.cpp + AudioResampler.cpp + CacheBase.cpp + CacheDisk.cpp + CacheMemory.cpp + ChunkReader.cpp + ChunkWriter.cpp + Color.cpp + Clip.cpp + ClipBase.cpp + Coordinate.cpp + CrashHandler.cpp + DummyReader.cpp + ReaderBase.cpp + RendererBase.cpp + WriterBase.cpp + EffectBase.cpp + EffectInfo.cpp + FFmpegReader.cpp + FFmpegWriter.cpp + Fraction.cpp + Frame.cpp + FrameMapper.cpp + Json.cpp + KeyFrame.cpp + OpenShotVersion.cpp + ZmqLogger.cpp + PlayerBase.cpp + Point.cpp + Profiles.cpp + QtHtmlReader.cpp + QtImageReader.cpp + QtPlayer.cpp + QtTextReader.cpp + Settings.cpp + Timeline.cpp) + +# Video effects +set(EFFECTS_SOURCES + effects/Bars.cpp + effects/Blur.cpp + effects/Brightness.cpp + effects/ChromaKey.cpp + 
effects/ColorShift.cpp + effects/Crop.cpp + effects/Deinterlace.cpp + effects/Hue.cpp + effects/Mask.cpp + effects/Negate.cpp + effects/Pixelate.cpp + effects/Saturation.cpp + effects/Shift.cpp + effects/Wave.cpp) + +# Qt video player components +set(QT_PLAYER_SOURCES + Qt/AudioPlaybackThread.cpp + Qt/PlayerDemo.cpp + Qt/PlayerPrivate.cpp + Qt/VideoCacheThread.cpp + Qt/VideoPlaybackThread.cpp + Qt/VideoRenderer.cpp + Qt/VideoRenderWidget.cpp) -################### ZEROMQ ##################### -# Find ZeroMQ library (used for socket communication & logging) -FIND_PACKAGE(ZMQ REQUIRED) +# Get list of MOC'able headers +file(GLOB_RECURSE OPENSHOT_QT_HEADERS ${CMAKE_SOURCE_DIR}/include/Qt/*.h) + +# Disable RPATH +SET(CMAKE_MACOSX_RPATH 0) + +############### CREATE LIBRARY ################# +# Create shared openshot library +add_library(openshot SHARED) + +target_sources(openshot PRIVATE + ${OPENSHOT_SOURCES} + ${EFFECTS_SOURCES} + ${QT_PLAYER_SOURCES} + ${OPENSHOT_QT_HEADERS} + ) + +# Set SONAME and other library properties +set_target_properties(openshot PROPERTIES + AUTOMOC ON + VERSION ${PROJECT_VERSION} + SOVERSION ${PROJECT_SO_VERSION} + INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib" + ) + +# Add optional ImageMagic-dependent sources +if(ImageMagick_FOUND) + target_sources(openshot PRIVATE + ImageReader.cpp + ImageWriter.cpp + TextReader.cpp) +endif() + +# BlackMagic related files +if(BLACKMAGIC_FOUND) + target_sources(openshot PRIVATE + DecklinkInput.cpp + DecklinkReader.cpp + DecklinkOutput.cpp + DecklinkWriter.cpp) +endif() + +# Location of our includes, both internally and when installed +target_include_directories(openshot + PRIVATE + ${CMAKE_SOURCE_DIR}/include + ${CMAKE_BINARY_DIR}/include + PUBLIC + $ + $ + $) -# Include ZeroMQ headers (needed for compile) -include_directories(${ZMQ_INCLUDE_DIRS}) ################### JSONCPP ##################### # Include jsoncpp headers (needed for JSON parsing) if (USE_SYSTEM_JSONCPP) - find_package(JsonCpp REQUIRED) - include_directories(${JSONCPP_INCLUDE_DIRS}) -else() - message("Using embedded JsonCpp") - include_directories("../thirdparty/jsoncpp/include") -endif(USE_SYSTEM_JSONCPP) - -############### PROFILING ################# -#set(PROFILER "/usr/lib/libprofiler.so.0.3.2") -#set(PROFILER "/usr/lib/libtcmalloc.so.4") + message(STATUS "Looking for system JsonCpp") + find_package(JsonCpp) + if (JSONCPP_FOUND AND NOT TARGET jsoncpp_lib) + # Create the expected target, for older installs that don't + add_library(jsoncpp_lib INTERFACE) + target_include_directories(jsoncpp_lib INTERFACE + ${JSONCPP_INCLUDE_DIRS}) + target_link_libraries(jsoncpp_lib INTERFACE ${JSONCPP_LIBRARY}) + endif () +endif () + +if (NOT JSONCPP_FOUND AND NOT DISABLE_BUNDLED_JSONCPP) + message(STATUS "Using embedded JsonCpp (not found or USE_SYSTEM_JSONCPP disabled)") + if (NOT TARGET jsoncpp_lib) + add_library(jsoncpp_lib INTERFACE) + target_include_directories(jsoncpp_lib INTERFACE + "${PROJECT_SOURCE_DIR}/thirdparty/jsoncpp") + target_sources(jsoncpp_lib INTERFACE "${PROJECT_SOURCE_DIR}/thirdparty/jsoncpp/jsoncpp.cpp") + # Because this satisfies the requirement, an installed JsonCpp is optional + set_package_properties(JsonCpp PROPERTIES TYPE OPTIONAL) + endif () + add_feature_info("JsonCpp (embedded)" TRUE "JsonCpp will be compiled from the bundled sources") +endif () + +if (JSONCPP_FOUND) + # JsonCpp is actually required, even though we probe for it optionally + # (This tells feature_summary() to bail if it's not found, later) + set_package_properties(JsonCpp PROPERTIES 
TYPE REQUIRED) +endif () + +# If we found any usable JsonCpp, use it. Otherwise, bail. +if (TARGET jsoncpp_lib) + target_link_libraries(openshot PUBLIC jsoncpp_lib) +endif () -#### GET LIST OF EFFECT FILES #### -FILE(GLOB EFFECT_FILES "${CMAKE_CURRENT_SOURCE_DIR}/effects/*.cpp") +################# QT5 ################### +# Find QT5 libraries +set(_qt_components Widgets Core Gui Multimedia MultimediaWidgets) +find_package(Qt5 COMPONENTS ${_qt_components} REQUIRED) -#### GET LIST OF QT PLAYER FILES #### -FILE(GLOB QT_PLAYER_FILES "${CMAKE_CURRENT_SOURCE_DIR}/Qt/*.cpp") +foreach(_qt_comp IN LISTS _qt_components) + if(TARGET Qt5::${_qt_comp}) + target_link_libraries(openshot PUBLIC Qt5::${_qt_comp}) + endif() +endforeach() -############### SET LIBRARY SOURCE FILES ################# -SET ( OPENSHOT_SOURCE_FILES - AudioBufferSource.cpp - AudioReaderSource.cpp - AudioResampler.cpp - CacheBase.cpp - CacheDisk.cpp - CacheMemory.cpp - ChunkReader.cpp - ChunkWriter.cpp - Color.cpp - Clip.cpp - ClipBase.cpp - Coordinate.cpp - CrashHandler.cpp - DummyReader.cpp - ReaderBase.cpp - RendererBase.cpp - WriterBase.cpp - EffectBase.cpp - ${EFFECT_FILES} - EffectInfo.cpp - FFmpegReader.cpp - FFmpegWriter.cpp - Fraction.cpp - Frame.cpp - FrameMapper.cpp - KeyFrame.cpp - ZmqLogger.cpp - PlayerBase.cpp - Point.cpp - Profiles.cpp - QtImageReader.cpp - QtPlayer.cpp - Timeline.cpp - - # Qt Video Player - ${QT_PLAYER_FILES} - ${MOC_FILES}) - -IF (NOT USE_SYSTEM_JSONCPP) - # Third Party JSON Parser - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - ../thirdparty/jsoncpp/src/lib_json/json_reader.cpp - ../thirdparty/jsoncpp/src/lib_json/json_value.cpp - ../thirdparty/jsoncpp/src/lib_json/json_writer.cpp) -ENDIF (NOT USE_SYSTEM_JSONCPP) +################### FFMPEG ##################### +# Find FFmpeg libraries (used for video encoding / decoding) +FIND_PACKAGE(FFmpeg REQUIRED COMPONENTS avcodec avdevice avformat avutil swscale) -# ImageMagic related files -IF (ImageMagick_FOUND) - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - ImageReader.cpp - ImageWriter.cpp - TextReader.cpp) -ENDIF (ImageMagick_FOUND) +foreach(ff_comp avcodec avdevice avformat avfilter avutil postproc swscale swresample avresample) + if(TARGET FFmpeg::${ff_comp}) + target_link_libraries(openshot PUBLIC FFmpeg::${ff_comp}) + endif() +endforeach() + +################### Threads #################### +# Threading library -- uses IMPORTED target Threads::Threads (since CMake 3.1) +set(CMAKE_THREAD_PREFER_PTHREAD TRUE) +set(THREADS_PREFER_PTHREAD_FLAG TRUE) +find_package(Threads REQUIRED) +target_link_libraries(openshot PUBLIC Threads::Threads) -# BlackMagic related files -IF (BLACKMAGIC_FOUND) - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - DecklinkInput.cpp - DecklinkReader.cpp - DecklinkOutput.cpp - DecklinkWriter.cpp) -ENDIF (BLACKMAGIC_FOUND) +################### OPENMP ##################### +# Check for OpenMP (used for multi-core processing) +# OpenMP is required by FFmpegReader/Writer +find_package(OpenMP REQUIRED) -# Get list of headers -file(GLOB_RECURSE headers ${CMAKE_SOURCE_DIR}/include/*.h) +if(NOT TARGET OpenMP::OpenMP_CXX) + # Older CMake versions (< 3.9) don't create find targets. 
+ add_library(OpenMP_TARGET INTERFACE) + add_library(OpenMP::OpenMP_CXX ALIAS OpenMP_TARGET) + target_compile_options(OpenMP_TARGET INTERFACE ${OpenMP_CXX_FLAGS}) + target_link_libraries(OpenMP_TARGET INTERFACE Threads::Threads) + target_link_libraries(OpenMP_TARGET INTERFACE ${OpenMP_CXX_FLAGS}) +endif() -# Disable RPATH -SET(CMAKE_MACOSX_RPATH 0) +target_link_libraries(openshot PUBLIC OpenMP::OpenMP_CXX) -############### CREATE LIBRARY ################# -# Create shared openshot library -add_library(openshot SHARED - ${OPENSHOT_SOURCE_FILES} - ${headers} ) +################### ZEROMQ ##################### +# Find ZeroMQ library (used for socket communication & logging) +find_package(ZeroMQ REQUIRED) # Creates libzmq target -# Set SONAME and other library properties -set_target_properties(openshot - PROPERTIES - VERSION ${PROJECT_VERSION} - SOVERSION ${SO_VERSION} - INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib" - ) +# Some platforms package the header-only cppzmq C++ bindings separately, +# others (Ubuntu) bundle them in with libzmq itself +find_package(cppzmq QUIET) # Creates cppzmq target + +# Link ZeroMQ library +if (TARGET libzmq) + target_link_libraries(openshot PUBLIC libzmq) +endif() +# Include cppzmq headers, if not bundled into libzmq +if (TARGET cppzmq) + target_link_libraries(openshot PUBLIC cppzmq) +endif() + +################### RESVG ##################### +# Migrate some legacy variable names +if(DEFINED RESVGDIR AND NOT DEFINED RESVG_ROOT) + set(RESVG_ROOT ${RESVGDIR}) +endif() +if(DEFINED ENV{RESVGDIR} AND NOT DEFINED RESVG_ROOT) + set(RESVG_ROOT $ENV{RESVGDIR}) +endif() + +# Find resvg library (used for rendering svg files) +FIND_PACKAGE(RESVG) + +# Include resvg headers (optional SVG library) +if (TARGET RESVG::resvg) + #include_directories(${RESVG_INCLUDE_DIRS}) + target_link_libraries(openshot PUBLIC RESVG::resvg) + + target_compile_definitions(openshot PUBLIC "-DUSE_RESVG=1") + set(CMAKE_SWIG_FLAGS "-DUSE_RESVG=1") +endif() ############### LINK LIBRARY ################# -SET ( REQUIRED_LIBRARIES - ${LIBOPENSHOT_AUDIO_LIBRARIES} - ${QT_LIBRARIES} - ${PROFILER} - ${JSONCPP_LIBRARY} - ${ZMQ_LIBRARIES} - ) - -IF (AVCODEC_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVCODEC_LIBRARIES} ) -ENDIF (AVCODEC_FOUND) -IF (AVDEVICE_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVDEVICE_LIBRARIES} ) -ENDIF (AVDEVICE_FOUND) -IF (AVFORMAT_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVFORMAT_LIBRARIES} ) -ENDIF (AVFORMAT_FOUND) -IF (AVFILTER_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVFILTER_LIBRARIES} ) -ENDIF (AVFILTER_FOUND) -IF (AVUTIL_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVUTIL_LIBRARIES} ) -ENDIF (AVUTIL_FOUND) -IF (POSTPROC_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${POSTPROC_LIBRARIES} ) -ENDIF (POSTPROC_FOUND) -IF (SWSCALE_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${SWSCALE_LIBRARIES} ) -ENDIF (SWSCALE_FOUND) -IF (SWRESAMPLE_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${SWRESAMPLE_LIBRARIES} ) -ENDIF (SWRESAMPLE_FOUND) -IF (AVRESAMPLE_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVRESAMPLE_LIBRARIES} ) -ENDIF (AVRESAMPLE_FOUND) - -IF (OPENMP_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${OpenMP_CXX_FLAGS} ) -ENDIF (OPENMP_FOUND) - -IF (ImageMagick_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${ImageMagick_LIBRARIES} ) -ENDIF (ImageMagick_FOUND) - -IF (BLACKMAGIC_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} 
${BLACKMAGIC_LIBRARY_DIR} ) -ENDIF (BLACKMAGIC_FOUND) +# Link remaining dependency libraries +target_link_libraries(openshot PUBLIC + ${LIBOPENSHOT_AUDIO_LIBRARIES} + ${PROFILER}) + +if(ImageMagick_FOUND) + target_link_libraries(openshot PUBLIC ${ImageMagick_LIBRARIES}) +endif() + +if(BLACKMAGIC_FOUND) + target_link_libraries(openshot PUBLIC ${BLACKMAGIC_LIBRARY_DIR}) +endif() -IF (WIN32) +if(WIN32) # Required for exception handling on Windows - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} "imagehlp" "dbghelp" ) -ENDIF(WIN32) + target_link_libraries(openshot PUBLIC "imagehlp" "dbghelp" ) +endif() -# Link all referenced libraries -target_link_libraries(openshot ${REQUIRED_LIBRARIES}) - -############### CLI EXECUTABLE ################ +############### CLI EXECUTABLES ################ # Create test executable add_executable(openshot-example examples/Example.cpp) +# Define path to test input files +SET(TEST_MEDIA_PATH "${PROJECT_SOURCE_DIR}/src/examples/") +IF (WIN32) + STRING(REPLACE "/" "\\\\" TEST_MEDIA_PATH TEST_MEDIA_PATH) +ENDIF(WIN32) +target_compile_definitions(openshot-example PRIVATE + -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" ) + # Link test executable to the new library target_link_libraries(openshot-example openshot) +add_executable(openshot-html-test examples/ExampleHtml.cpp) +target_link_libraries(openshot-html-test openshot Qt5::Gui) + ############### PLAYER EXECUTABLE ################ # Create test executable add_executable(openshot-player Qt/demo/main.cpp) @@ -381,14 +421,14 @@ ############### INCLUDE SWIG BINDINGS ################ add_subdirectory(bindings) - ############### INSTALL HEADERS & LIBRARY ################ set(LIB_INSTALL_DIR lib${LIB_SUFFIX}) # determine correct lib folder # Install primary library -INSTALL( TARGETS openshot +INSTALL(TARGETS openshot ARCHIVE DESTINATION ${LIB_INSTALL_DIR} LIBRARY DESTINATION ${LIB_INSTALL_DIR} + RUNTIME DESTINATION ${LIB_INSTALL_DIR} COMPONENT library ) INSTALL(DIRECTORY ${CMAKE_SOURCE_DIR}/include/ diff -Nru libopenshot-0.2.2+dfsg1/src/Color.cpp libopenshot-0.2.5+dfsg1/src/Color.cpp --- libopenshot-0.2.2+dfsg1/src/Color.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Color.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for EffectBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -50,7 +53,7 @@ } // Constructor which takes a HEX color code -Color::Color(string color_hex) +Color::Color(std::string color_hex) { // Create a QColor from hex QColor color(QString::fromStdString(color_hex)); @@ -61,7 +64,7 @@ } // Get the HEX value of a color at a specific frame -string Color::GetColorHex(int64_t frame_number) { +std::string Color::GetColorHex(int64_t frame_number) { int r = red.GetInt(frame_number); int g = green.GetInt(frame_number); @@ -82,14 +85,14 @@ } // Generate JSON string of this object -string Color::Json() { +std::string Color::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Color::JsonValue() { +// Generate Json::Value for this object +Json::Value Color::JsonValue() const { // Create root json object Json::Value root; @@ -103,30 +106,24 @@ } // Load JSON string into this object -void Color::SetJson(string value) { +void Color::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Color::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Color::SetJsonValue(const Json::Value root) { // Set data from Json (if key is found) if (!root["red"].isNull()) diff -Nru libopenshot-0.2.2+dfsg1/src/Coordinate.cpp libopenshot-0.2.5+dfsg1/src/Coordinate.cpp --- libopenshot-0.2.2+dfsg1/src/Coordinate.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Coordinate.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Coordinate class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -32,24 +35,24 @@ // Default constructor for a coordinate, which defaults the X and Y to zero (0,0) Coordinate::Coordinate() : - X(0), Y(0), increasing(true), repeated(1,1), delta(0.0) { + X(0), Y(0) { } // Constructor which also allows the user to set the X and Y Coordinate::Coordinate(double x, double y) : - X(x), Y(y), increasing(true), repeated(1,1), delta(0.0) { + X(x), Y(y) { } // Generate JSON string of this object -string Coordinate::Json() { +std::string Coordinate::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Coordinate::JsonValue() { +// Generate Json::Value for this object +Json::Value Coordinate::JsonValue() const { // Create root json object Json::Value root; @@ -66,45 +69,28 @@ } // Load JSON string into this object -void Coordinate::SetJson(string value) { +void Coordinate::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Coordinate::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Coordinate::SetJsonValue(const Json::Value root) { // Set data from Json (if key is found) if (!root["X"].isNull()) X = root["X"].asDouble(); if (!root["Y"].isNull()) Y = root["Y"].asDouble(); - if (!root["increasing"].isNull()) - increasing = root["increasing"].asBool(); - if (!root["repeated"].isNull() && root["repeated"].isObject()) - { - if (!root["repeated"]["num"].isNull()) - repeated.num = root["repeated"]["num"].asInt(); - if (!root["repeated"]["den"].isNull()) - repeated.den = root["repeated"]["den"].asInt(); - } - if (!root["delta"].isNull()) - delta = root["delta"].asDouble(); } diff -Nru libopenshot-0.2.2+dfsg1/src/CrashHandler.cpp libopenshot-0.2.5+dfsg1/src/CrashHandler.cpp --- libopenshot-0.2.2+dfsg1/src/CrashHandler.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/CrashHandler.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for CrashHandler class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/DecklinkInput.cpp libopenshot-0.2.5+dfsg1/src/DecklinkInput.cpp --- libopenshot-0.2.2+dfsg1/src/DecklinkInput.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/DecklinkInput.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,7 +3,10 @@ * @brief Source file for DecklinkInput class * @author Jonathan Thomas , Blackmagic Design * - * @section LICENSE + * @ref License + */ + +/* LICENSE * * Copyright (c) 2009 Blackmagic Design * @@ -30,7 +33,7 @@ * DEALINGS IN THE SOFTWARE. * * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/DecklinkOutput.cpp libopenshot-0.2.5+dfsg1/src/DecklinkOutput.cpp --- libopenshot-0.2.2+dfsg1/src/DecklinkOutput.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/DecklinkOutput.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,7 +3,10 @@ * @brief Source file for DecklinkOutput class * @author Jonathan Thomas , Blackmagic Design * - * @section LICENSE + * @ref License + */ + +/* LICENSE * * Copyright (c) 2009 Blackmagic Design * @@ -30,7 +33,7 @@ * DEALINGS IN THE SOFTWARE. * * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/DecklinkReader.cpp libopenshot-0.2.5+dfsg1/src/DecklinkReader.cpp --- libopenshot-0.2.2+dfsg1/src/DecklinkReader.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/DecklinkReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for DecklinkReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -243,14 +246,14 @@ // Generate JSON string of this object -string DecklinkReader::Json() { +std::string DecklinkReader::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value DecklinkReader::JsonValue() { +// Generate Json::Value for this object +Json::Value DecklinkReader::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties @@ -261,30 +264,24 @@ } // Load JSON string into this object -void DecklinkReader::SetJson(string value) { +void DecklinkReader::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void DecklinkReader::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void DecklinkReader::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); diff -Nru libopenshot-0.2.2+dfsg1/src/DecklinkWriter.cpp libopenshot-0.2.5+dfsg1/src/DecklinkWriter.cpp --- libopenshot-0.2.2+dfsg1/src/DecklinkWriter.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/DecklinkWriter.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for DecklinkWriter class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -231,7 +234,7 @@ { // Check for open reader (or throw exception) if (!is_open) - throw WriterClosed("The DecklinkWriter is closed. Call Open() before calling this method.", ""); + throw WriterClosed("The DecklinkWriter is closed. Call Open() before calling this method."); delegate->WriteFrame(frame); } diff -Nru libopenshot-0.2.2+dfsg1/src/DummyReader.cpp libopenshot-0.2.5+dfsg1/src/DummyReader.cpp --- libopenshot-0.2.2+dfsg1/src/DummyReader.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/DummyReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for DummyReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -66,11 +69,14 @@ info.display_ratio.num = size.num; info.display_ratio.den = size.den; - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) 
+ // Open and Close the reader, to populate its attributes (such as height, width, etc...) Open(); Close(); } +DummyReader::~DummyReader() { +} + // Open image file void DummyReader::Open() { @@ -118,14 +124,14 @@ } // Generate JSON string of this object -string DummyReader::Json() { +std::string DummyReader::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value DummyReader::JsonValue() { +// Generate Json::Value for this object +Json::Value DummyReader::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties @@ -136,30 +142,24 @@ } // Load JSON string into this object -void DummyReader::SetJson(string value) { +void DummyReader::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void DummyReader::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void DummyReader::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); diff -Nru libopenshot-0.2.2+dfsg1/src/EffectBase.cpp libopenshot-0.2.5+dfsg1/src/EffectBase.cpp --- libopenshot-0.2.2+dfsg1/src/EffectBase.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/EffectBase.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for EffectBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -29,7 +32,7 @@ using namespace openshot; -// Initialize the values of the FileInfo struct +// Initialize the values of the EffectInfo struct void EffectBase::InitEffectInfo() { // Init clip settings @@ -47,15 +50,15 @@ // Display file information void EffectBase::DisplayInfo() { - cout << fixed << setprecision(2) << boolalpha; - cout << "----------------------------" << endl; - cout << "----- Effect Information -----" << endl; - cout << "----------------------------" << endl; - cout << "--> Name: " << info.name << endl; - cout << "--> Description: " << info.description << endl; - cout << "--> Has Video: " << info.has_video << endl; - cout << "--> Has Audio: " << info.has_audio << endl; - cout << "----------------------------" << endl; + std::cout << std::fixed << std::setprecision(2) << std::boolalpha; + std::cout << "----------------------------" << std::endl; + std::cout << "----- Effect Information -----" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--> Name: " << info.name << std::endl; + std::cout << "--> Description: " << info.description << std::endl; + std::cout << "--> Has Video: " << info.has_video << std::endl; + std::cout << "--> Has Audio: " << info.has_audio << std::endl; + std::cout << "----------------------------" << std::endl; } // Constrain a color value from 0 to 255 @@ -71,20 +74,19 @@ } // Generate JSON string of this object -string EffectBase::Json() { +std::string EffectBase::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value EffectBase::JsonValue() { +// Generate Json::Value for this object +Json::Value EffectBase::JsonValue() const { // Create root json object Json::Value root = ClipBase::JsonValue(); // get parent properties root["name"] = info.name; root["class_name"] = info.class_name; - root["short_name"] = info.short_name; root["description"] = info.description; root["has_video"] = info.has_video; root["has_audio"] = info.has_audio; @@ -95,30 +97,24 @@ } // Load JSON string into this object -void EffectBase::SetJson(string value) { +void EffectBase::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void EffectBase::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void EffectBase::SetJsonValue(const Json::Value root) { // Set parent data ClipBase::SetJsonValue(root); @@ -128,14 +124,13 @@ Order(root["order"].asInt()); } -// Generate Json::JsonValue for this object -Json::Value EffectBase::JsonInfo() { +// Generate Json::Value for this object +Json::Value EffectBase::JsonInfo() const { // Create root json object Json::Value root; root["name"] = info.name; root["class_name"] = info.class_name; - root["short_name"] = 
info.short_name; root["description"] = info.description; root["has_video"] = info.has_video; root["has_audio"] = info.has_audio; diff -Nru libopenshot-0.2.2+dfsg1/src/EffectInfo.cpp libopenshot-0.2.5+dfsg1/src/EffectInfo.cpp --- libopenshot-0.2.2+dfsg1/src/EffectInfo.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/EffectInfo.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for EffectInfo class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -32,14 +35,14 @@ // Generate JSON string of this object -string EffectInfo::Json() { +std::string EffectInfo::Json() { // Return formatted string return JsonValue().toStyledString(); } // Create a new effect instance -EffectBase* EffectInfo::CreateEffect(string effect_type) { +EffectBase* EffectInfo::CreateEffect(std::string effect_type) { // Init the matching effect object if (effect_type == "Bars") return new Bars(); @@ -53,7 +56,7 @@ else if (effect_type == "ChromaKey") return new ChromaKey(); - else if (effect_type == "Color Shift") + else if (effect_type == "ColorShift") return new ColorShift(); else if (effect_type == "Crop") @@ -85,7 +88,7 @@ return NULL; } -// Generate Json::JsonValue for this object +// Generate Json::Value for this object Json::Value EffectInfo::JsonValue() { // Create root json object diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Bars.cpp libopenshot-0.2.5+dfsg1/src/effects/Bars.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Bars.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Bars.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Bars effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
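/* The EffectInfo hunk above renames the factory key from "Color Shift" to "ColorShift", matching the
 * info.class_name that ColorShift::init_effect_details() sets later in this diff; the two strings must
 * be identical for JSON with "type": "ColorShift" to round-trip back into the right effect. The diff
 * keeps an if/else-if chain; the sketch below is a table-driven equivalent that makes the
 * "factory key == class_name" rule explicit. EffectStub and its subclasses are stand-ins invented for
 * this sketch, not libopenshot types. */
#include <functional>
#include <map>
#include <memory>
#include <string>

struct EffectStub { virtual ~EffectStub() = default; };   // stand-in for openshot::EffectBase
struct BarsStub : EffectStub {};
struct ColorShiftStub : EffectStub {};

std::unique_ptr<EffectStub> create_effect_sketch(const std::string& effect_type) {
    static const std::map<std::string, std::function<std::unique_ptr<EffectStub>()>> factory = {
        {"Bars",       [] { return std::make_unique<BarsStub>(); }},
        {"ColorShift", [] { return std::make_unique<ColorShiftStub>(); }},  // key matches class_name
    };
    const auto it = factory.find(effect_type);
    if (it == factory.end())
        return nullptr;            // unknown type, mirroring the diff's final "return NULL"
    return it->second();
}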
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -111,14 +114,14 @@ } // Generate JSON string of this object -string Bars::Json() { +std::string Bars::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Bars::JsonValue() { +// Generate Json::Value for this object +Json::Value Bars::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -134,30 +137,24 @@ } // Load JSON string into this object -void Bars::SetJson(string value) { +void Bars::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Bars::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Bars::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -176,7 +173,7 @@ } // Get all properties for a specific frame -string Bars::PropertiesJSON(int64_t requested_frame) { +std::string Bars::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -200,4 +197,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Blur.cpp libopenshot-0.2.5+dfsg1/src/effects/Blur.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Blur.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Blur.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Blur effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
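/* Bars follows the same JSON migration as the classes above; the one extra detail worth noting is the
 * handler change from "catch (exception e)" to "catch (const std::exception& e)". Catching the base
 * class by value copies the exception and slices off the derived part; catching by const reference
 * keeps the thrown object intact. Standalone illustration, not libopenshot code: */
#include <iostream>
#include <stdexcept>

int main() {
    try {
        throw std::runtime_error("missing key: \"type\"");
    }
    catch (const std::exception& e) {   // no copy, no slicing
        std::cout << e.what() << '\n';  // prints the runtime_error message
    }
    return 0;
}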
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -71,192 +74,109 @@ float sigma_value = sigma.GetValue(frame_number); int iteration_value = iterations.GetInt(frame_number); + int w = frame_image->width(); + int h = frame_image->height(); - // Declare arrays for each color channel - unsigned char *red = new unsigned char[frame_image->width() * frame_image->height()](); - unsigned char *green = new unsigned char[frame_image->width() * frame_image->height()](); - unsigned char *blue = new unsigned char[frame_image->width() * frame_image->height()](); - unsigned char *alpha = new unsigned char[frame_image->width() * frame_image->height()](); - // Create empty target RGBA arrays (for the results of our blur) - unsigned char *blur_red = new unsigned char[frame_image->width() * frame_image->height()](); - unsigned char *blur_green = new unsigned char[frame_image->width() * frame_image->height()](); - unsigned char *blur_blue = new unsigned char[frame_image->width() * frame_image->height()](); - unsigned char *blur_alpha = new unsigned char[frame_image->width() * frame_image->height()](); - - // Loop through pixels and split RGBA channels into separate arrays - unsigned char *pixels = (unsigned char *) frame_image->bits(); - for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) - { - // Get the RGBA values from each pixel - unsigned char R = pixels[byte_index]; - unsigned char G = pixels[byte_index + 1]; - unsigned char B = pixels[byte_index + 2]; - unsigned char A = pixels[byte_index + 3]; - - // Split channels into their own arrays - red[pixel] = R; - green[pixel] = G; - blue[pixel] = B; - alpha[pixel] = A; - } - - // Init target RGBA arrays - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blur_red[i] = red[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blur_green[i] = green[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blur_blue[i] = blue[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blur_alpha[i] = alpha[i]; + // Grab two copies of the image pixel data + QImage image_copy = frame_image->copy(); + std::shared_ptr frame_image_2 = std::make_shared(image_copy); // Loop through each iteration - for (int iteration = 0; iteration < iteration_value; iteration++) + for (int iteration = 0; iteration < iteration_value; ++iteration) { // HORIZONTAL BLUR (if any) if (horizontal_radius_value > 0.0) { - // Init boxes for computing blur - int *bxs = initBoxes(sigma_value, horizontal_radius_value); - // Apply horizontal blur to target RGBA channels - boxBlurH(red, blur_red, frame_image->width(), frame_image->height(), horizontal_radius_value); - boxBlurH(green, blur_green, frame_image->width(), frame_image->height(), horizontal_radius_value); - boxBlurH(blue, blur_blue, frame_image->width(), frame_image->height(), horizontal_radius_value); - boxBlurH(alpha, blur_alpha, frame_image->width(), frame_image->height(), horizontal_radius_value); - - // Remove boxes - delete[] bxs; - - // Copy blur_ back to for vertical blur or next iteration - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) red[i] = blur_red[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) green[i] = blur_green[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blue[i] = 
blur_blue[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) alpha[i] = blur_alpha[i]; + boxBlurH(frame_image->bits(), frame_image_2->bits(), w, h, horizontal_radius_value); + + // Swap output image back to input + frame_image.swap(frame_image_2); } // VERTICAL BLUR (if any) if (vertical_radius_value > 0.0) { - // Init boxes for computing blur - int *bxs = initBoxes(sigma_value, vertical_radius_value); - // Apply vertical blur to target RGBA channels - boxBlurT(red, blur_red, frame_image->width(), frame_image->height(), vertical_radius_value); - boxBlurT(green, blur_green, frame_image->width(), frame_image->height(), vertical_radius_value); - boxBlurT(blue, blur_blue, frame_image->width(), frame_image->height(), vertical_radius_value); - boxBlurT(alpha, blur_alpha, frame_image->width(), frame_image->height(), vertical_radius_value); - - // Remove boxes - delete[] bxs; - - // Copy blur_ back to for vertical blur or next iteration - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) red[i] = blur_red[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) green[i] = blur_green[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) blue[i] = blur_blue[i]; - for (int i = 0; i < (frame_image->width() * frame_image->height()); i++) alpha[i] = blur_alpha[i]; - } - } + boxBlurT(frame_image->bits(), frame_image_2->bits(), w, h, vertical_radius_value); - // Copy RGBA channels back to original image - for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) - { - // Get the RGB values from the pixel - unsigned char R = blur_red[pixel]; - unsigned char G = blur_green[pixel]; - unsigned char B = blur_blue[pixel]; - unsigned char A = blur_alpha[pixel]; - - // Split channels into their own arrays - pixels[byte_index] = R; - pixels[byte_index + 1] = G; - pixels[byte_index + 2] = B; - pixels[byte_index + 3] = A; + // Swap output image back to input + frame_image.swap(frame_image_2); + } } - // Delete channel arrays - delete[] red; - delete[] green; - delete[] blue; - delete[] alpha; - delete[] blur_red; - delete[] blur_green; - delete[] blur_blue; - delete[] blur_alpha; - // return the modified frame return frame; } // Credit: http://blog.ivank.net/fastest-gaussian-blur.html (MIT License) -int* Blur::initBoxes(float sigma, int n) // standard deviation, number of boxes -{ - float wIdeal = sqrt((12.0 * sigma * sigma / n) + 1.0); // Ideal averaging filter width - int wl = floor(wIdeal); - if (wl % 2 == 0) wl--; - int wu = wl + 2; - - float mIdeal = (12.0 * sigma * sigma - n * wl * wl - 4 * n * wl - 3 * n) / (-4.0 * wl - 4); - int m = round(mIdeal); - - int *sizes = new int[n](); - for (int i = 0; i < n; i++) sizes[i] = i < m ? 
wl : wu; - return sizes; -} - -// Credit: http://blog.ivank.net/fastest-gaussian-blur.html (MIT License) +// Modified to process all four channels in a pixel array void Blur::boxBlurH(unsigned char *scl, unsigned char *tcl, int w, int h, int r) { float iarr = 1.0 / (r + r + 1); - for (int i = 0; i < h; i++) { - int ti = i * w, li = ti, ri = ti + r; - int fv = scl[ti], lv = scl[ti + w - 1], val = (r + 1) * fv; - for (int j = 0; j < r; j++) val += scl[ti + j]; - for (int j = 0; j <= r; j++) { - val += scl[ri++] - fv; - tcl[ti++] = round(val * iarr); - } - for (int j = r + 1; j < w - r; j++) { - val += scl[ri++] - scl[li++]; - tcl[ti++] = round(val * iarr); - } - for (int j = w - r; j < w; j++) { - val += lv - scl[li++]; - tcl[ti++] = round(val * iarr); + + #pragma omp parallel for shared (scl, tcl) + for (int i = 0; i < h; ++i) { + for (int ch = 0; ch < 4; ++ch) { + int ti = i * w, li = ti, ri = ti + r; + int fv = scl[ti * 4 + ch], lv = scl[(ti + w - 1) * 4 + ch], val = (r + 1) * fv; + for (int j = 0; j < r; ++j) { + val += scl[(ti + j) * 4 + ch]; + } + for (int j = 0; j <= r; ++j) { + val += scl[ri++ * 4 + ch] - fv; + tcl[ti++ * 4 + ch] = round(val * iarr); + } + for (int j = r + 1; j < w - r; ++j) { + val += scl[ri++ * 4 + ch] - scl[li++ * 4 + ch]; + tcl[ti++ * 4 + ch] = round(val * iarr); + } + for (int j = w - r; j < w; ++j) { + val += lv - scl[li++ * 4 + ch]; + tcl[ti++ * 4 + ch] = round(val * iarr); + } } } } void Blur::boxBlurT(unsigned char *scl, unsigned char *tcl, int w, int h, int r) { float iarr = 1.0 / (r + r + 1); + + #pragma omp parallel for shared (scl, tcl) for (int i = 0; i < w; i++) { - int ti = i, li = ti, ri = ti + r * w; - int fv = scl[ti], lv = scl[ti + w * (h - 1)], val = (r + 1) * fv; - for (int j = 0; j < r; j++) val += scl[ti + j * w]; - for (int j = 0; j <= r; j++) { - val += scl[ri] - fv; - tcl[ti] = round(val * iarr); - ri += w; - ti += w; - } - for (int j = r + 1; j < h - r; j++) { - val += scl[ri] - scl[li]; - tcl[ti] = round(val * iarr); - li += w; - ri += w; - ti += w; - } - for (int j = h - r; j < h; j++) { - val += lv - scl[li]; - tcl[ti] = round(val * iarr); - li += w; - ti += w; + for (int ch = 0; ch < 4; ++ch) { + int ti = i, li = ti, ri = ti + r * w; + int fv = scl[ti * 4 + ch], lv = scl[(ti + w * (h - 1)) * 4 + ch], val = (r + 1) * fv; + for (int j = 0; j < r; j++) val += scl[(ti + j * w) * 4 + ch]; + for (int j = 0; j <= r; j++) { + val += scl[ri * 4 + ch] - fv; + tcl[ti * 4 + ch] = round(val * iarr); + ri += w; + ti += w; + } + for (int j = r + 1; j < h - r; j++) { + val += scl[ri * 4 + ch] - scl[li * 4 + ch]; + tcl[ti * 4 + ch] = round(val * iarr); + li += w; + ri += w; + ti += w; + } + for (int j = h - r; j < h; j++) { + val += lv - scl[li * 4 + ch]; + tcl[ti * 4 + ch] = round(val * iarr); + li += w; + ti += w; + } } } } // Generate JSON string of this object -string Blur::Json() { +std::string Blur::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Blur::JsonValue() { +// Generate Json::Value for this object +Json::Value Blur::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -271,30 +191,24 @@ } // Load JSON string into this object -void Blur::SetJson(string value) { +void Blur::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw 
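/* The Blur rewrite above drops the eight per-channel scratch arrays and runs the box blur directly on
 * the interleaved RGBA buffer: each pass reads one QImage and writes a second copy, then
 * frame_image.swap(frame_image_2) makes the output the input for the next pass, and rows are
 * parallelized with "#pragma omp parallel for" (safe because each row writes a disjoint slice of the
 * output). The sketch below shows a horizontal pass over an interleaved RGBA8888 buffer with the same
 * stride-4 indexing; for clarity it recomputes the window sum instead of using the running sum the
 * real boxBlurH() keeps, so it is O(w*r) per row rather than O(w). Function and buffer names are
 * illustrative. */
#include <algorithm>
#include <cstdint>

// src and dst must be distinct buffers of w*h*4 bytes; r is the blur radius, clamped at the edges.
void box_blur_h_rgba(const uint8_t* src, uint8_t* dst, int w, int h, int r) {
    for (int y = 0; y < h; ++y) {
        const uint8_t* row_in  = src + static_cast<long>(y) * w * 4;
        uint8_t*       row_out = dst + static_cast<long>(y) * w * 4;
        for (int ch = 0; ch < 4; ++ch) {                 // R, G, B, A interleaved with stride 4
            for (int x = 0; x < w; ++x) {
                const int lo = std::max(0, x - r), hi = std::min(w - 1, x + r);
                int sum = 0;
                for (int i = lo; i <= hi; ++i)
                    sum += row_in[i * 4 + ch];
                row_out[x * 4 + ch] = static_cast<uint8_t>(sum / (hi - lo + 1));
            }
        }
    }
}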
InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Blur::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Blur::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -311,7 +225,7 @@ } // Get all properties for a specific frame -string Blur::PropertiesJSON(int64_t requested_frame) { +std::string Blur::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -331,4 +245,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Brightness.cpp libopenshot-0.2.5+dfsg1/src/effects/Brightness.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Brightness.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Brightness.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Brightness class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -69,35 +72,24 @@ // Loop through pixels unsigned char *pixels = (unsigned char *) frame_image->bits(); - for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) - { - // Get the RGB values from the pixel - int R = pixels[byte_index]; - int G = pixels[byte_index + 1]; - int B = pixels[byte_index + 2]; - int A = pixels[byte_index + 3]; + int pixel_count = frame_image->width() * frame_image->height(); - // Adjust the contrast + #pragma omp parallel for + for (int pixel = 0; pixel < pixel_count; ++pixel) + { + // Compute contrast adjustment factor float factor = (259 * (contrast_value + 255)) / (255 * (259 - contrast_value)); - R = constrain((factor * (R - 128)) + 128); - G = constrain((factor * (G - 128)) + 128); - B = constrain((factor * (B - 128)) + 128); - - // Adjust the brightness - R += (255 * brightness_value); - G += (255 * brightness_value); - B += (255 * brightness_value); - - // Constrain the value from 0 to 255 - R = constrain(R); - G = constrain(G); - B = constrain(B); - - // Set all pixels to new value - pixels[byte_index] = R; - pixels[byte_index + 1] = G; - pixels[byte_index + 2] = B; - pixels[byte_index + 3] = A; // leave the alpha value alone + + // Get RGB pixels from image and apply constrained contrast adjustment + int R = constrain((factor * (pixels[pixel * 4] - 128)) + 128); + int G = constrain((factor * (pixels[pixel * 4 + 1] - 128)) + 128); + int B = constrain((factor * (pixels[pixel * 4 + 2] - 128)) + 128); + // (Don't modify Alpha value) + + // Adjust brightness and write constrained values back to image + pixels[pixel * 4] = constrain(R + (255 * brightness_value)); + pixels[pixel * 4 + 1] = constrain(G + (255 * brightness_value)); + pixels[pixel * 4 + 2] = constrain(B + (255 * brightness_value)); } // return the modified frame @@ -105,14 +97,14 @@ } 
// Generate JSON string of this object -string Brightness::Json() { +std::string Brightness::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Brightness::JsonValue() { +// Generate Json::Value for this object +Json::Value Brightness::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -125,30 +117,24 @@ } // Load JSON string into this object -void Brightness::SetJson(string value) { +void Brightness::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Brightness::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Brightness::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -161,7 +147,7 @@ } // Get all properties for a specific frame -string Brightness::PropertiesJSON(int64_t requested_frame) { +std::string Brightness::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -179,4 +165,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/ChromaKey.cpp libopenshot-0.2.5+dfsg1/src/effects/ChromaKey.cpp --- libopenshot-0.2.2+dfsg1/src/effects/ChromaKey.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/ChromaKey.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for ChromaKey class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -98,14 +101,14 @@ } // Generate JSON string of this object -string ChromaKey::Json() { +std::string ChromaKey::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value ChromaKey::JsonValue() { +// Generate Json::Value for this object +Json::Value ChromaKey::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -118,30 +121,24 @@ } // Load JSON string into this object -void ChromaKey::SetJson(string value) { +void ChromaKey::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void ChromaKey::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void ChromaKey::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -154,7 +151,7 @@ } // Get all properties for a specific frame -string ChromaKey::PropertiesJSON(int64_t requested_frame) { +std::string ChromaKey::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; diff -Nru libopenshot-0.2.2+dfsg1/src/effects/ColorShift.cpp libopenshot-0.2.5+dfsg1/src/effects/ColorShift.cpp --- libopenshot-0.2.2+dfsg1/src/effects/ColorShift.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/ColorShift.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Color Shift effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -30,7 +33,7 @@ using namespace openshot; /// Blank constructor, useful when using Json to load the effect properties -ColorShift::ColorShift() : red_x(-0.05), red_y(0.0), green_x(0.05), green_y(0.0), blue_x(0.0), blue_y(0.0), alpha_x(0.0), alpha_y(0.0) { +ColorShift::ColorShift() : red_x(0.0), red_y(0.0), green_x(0.0), green_y(0.0), blue_x(0.0), blue_y(0.0), alpha_x(0.0), alpha_y(0.0) { // Init effect properties init_effect_details(); } @@ -50,7 +53,7 @@ InitEffectInfo(); /// Set the effect info - info.class_name = "Color Shift"; + info.class_name = "ColorShift"; info.name = "Color Shift"; info.description = "Shift the colors of an image up, down, left, and right (with infinite wrapping)."; info.has_audio = false; @@ -72,25 +75,24 @@ // Get the current shift amount, and clamp to range (-1 to 1 range) // Red Keyframes float red_x_shift = red_x.GetValue(frame_number); - int red_x_shift_limit = round(frame_image_width * fmod(abs(red_x_shift), 1.0)); + int red_x_shift_limit = round(frame_image_width * fmod(fabs(red_x_shift), 1.0)); float red_y_shift = red_y.GetValue(frame_number); - int red_y_shift_limit = round(frame_image_height * fmod(abs(red_y_shift), 1.0)); + int red_y_shift_limit = round(frame_image_height * fmod(fabs(red_y_shift), 1.0)); // Green Keyframes float green_x_shift = green_x.GetValue(frame_number); - int green_x_shift_limit = round(frame_image_width * fmod(abs(green_x_shift), 1.0)); + int green_x_shift_limit = round(frame_image_width * fmod(fabs(green_x_shift), 1.0)); float green_y_shift = green_y.GetValue(frame_number); - int green_y_shift_limit = round(frame_image_height * fmod(abs(green_y_shift), 1.0)); + int green_y_shift_limit = round(frame_image_height * fmod(fabs(green_y_shift), 1.0)); // Blue Keyframes float blue_x_shift = blue_x.GetValue(frame_number); - int blue_x_shift_limit = round(frame_image_width * fmod(abs(blue_x_shift), 1.0)); + int blue_x_shift_limit = round(frame_image_width * fmod(fabs(blue_x_shift), 1.0)); float blue_y_shift = blue_y.GetValue(frame_number); - int blue_y_shift_limit = round(frame_image_height * fmod(abs(blue_y_shift), 1.0)); + int blue_y_shift_limit = round(frame_image_height * fmod(fabs(blue_y_shift), 1.0)); // Alpha Keyframes float alpha_x_shift = alpha_x.GetValue(frame_number); - int alpha_x_shift_limit = round(frame_image_width * fmod(abs(alpha_x_shift), 1.0)); + int alpha_x_shift_limit = round(frame_image_width * fmod(fabs(alpha_x_shift), 1.0)); float alpha_y_shift = alpha_y.GetValue(frame_number); - int alpha_y_shift_limit = round(frame_image_height * fmod(abs(alpha_y_shift), 1.0)); - + int alpha_y_shift_limit = round(frame_image_height * fmod(fabs(alpha_y_shift), 1.0)); // Make temp copy of pixels unsigned char *temp_image = new unsigned char[frame_image_width * frame_image_height * 4](); @@ -127,7 +129,6 @@ blue_starting_row_index = starting_row_index; alpha_starting_row_index = starting_row_index; - red_pixel_offset = 0; green_pixel_offset = 0; blue_pixel_offset = 0; @@ -191,14 +192,14 @@ } // Generate JSON string of this object -string ColorShift::Json() { +std::string ColorShift::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value ColorShift::JsonValue() { +// Generate Json::Value for this object +Json::Value ColorShift::JsonValue() const { // Create root json object Json::Value 
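/* The ColorShift hunks above (and the Shift ones later in this diff) replace abs() with fabs() inside
 * fmod(..., 1.0). Depending on which headers are in scope, an unqualified abs() can resolve to the
 * integer overload, truncating the fractional shift before fmod() ever sees it; fabs() keeps the
 * fraction, so fmod(fabs(x), 1.0) wraps the shift magnitude into [0, 1) as intended. Small
 * self-contained demonstration (the cast makes the integer-overload behaviour explicit): */
#include <cmath>
#include <cstdio>
#include <cstdlib>

int main() {
    const double shift = -1.35;
    // Desired behaviour: wrap the magnitude into [0, 1)  ->  0.35
    std::printf("fabs:    %f\n", std::fmod(std::fabs(shift), 1.0));   // 0.350000
    // What the integer overload does: -1.35 is truncated to -1, abs gives 1, fmod(1, 1) is 0
    std::printf("int abs: %f\n",
                std::fmod(static_cast<double>(std::abs(static_cast<int>(shift))), 1.0));  // 0.000000
    return 0;
}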
root = EffectBase::JsonValue(); // get parent properties @@ -217,30 +218,24 @@ } // Load JSON string into this object -void ColorShift::SetJson(string value) { +void ColorShift::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void ColorShift::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void ColorShift::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -265,7 +260,7 @@ } // Get all properties for a specific frame -string ColorShift::PropertiesJSON(int64_t requested_frame) { +std::string ColorShift::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -289,4 +284,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Crop.cpp libopenshot-0.2.5+dfsg1/src/effects/Crop.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Crop.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Crop.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Crop effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -111,14 +114,14 @@ } // Generate JSON string of this object -string Crop::Json() { +std::string Crop::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Crop::JsonValue() { +// Generate Json::Value for this object +Json::Value Crop::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -133,30 +136,24 @@ } // Load JSON string into this object -void Crop::SetJson(string value) { +void Crop::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Crop::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Crop::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -173,7 +170,7 @@ } // Get all properties for a specific frame -string Crop::PropertiesJSON(int64_t requested_frame) { +std::string Crop::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -193,4 +190,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Deinterlace.cpp libopenshot-0.2.5+dfsg1/src/effects/Deinterlace.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Deinterlace.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Deinterlace.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for De-interlace class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -93,14 +96,14 @@ } // Generate JSON string of this object -string Deinterlace::Json() { +std::string Deinterlace::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Deinterlace::JsonValue() { +// Generate Json::Value for this object +Json::Value Deinterlace::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -112,30 +115,24 @@ } // Load JSON string into this object -void Deinterlace::SetJson(string value) { +void Deinterlace::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Deinterlace::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Deinterlace::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -146,7 +143,7 @@ } // Get all properties for a specific frame -string Deinterlace::PropertiesJSON(int64_t requested_frame) { +std::string Deinterlace::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Hue.cpp libopenshot-0.2.5+dfsg1/src/effects/Hue.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Hue.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Hue.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Hue effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -63,36 +66,35 @@ // Get the frame's image std::shared_ptr frame_image = frame->GetImage(); + int pixel_count = frame_image->width() * frame_image->height(); + // Get the current hue percentage shift amount, and convert to degrees double degrees = 360.0 * hue.GetValue(frame_number); float cosA = cos(degrees*3.14159265f/180); float sinA = sin(degrees*3.14159265f/180); // Calculate a rotation matrix for the RGB colorspace (based on the current hue shift keyframe value) - float matrix[3][3] = {{cosA + (1.0f - cosA) / 3.0f, 1.0f/3.0f * (1.0f - cosA) - sqrtf(1.0f/3.0f) * sinA, 1.0f/3.0f * (1.0f - cosA) + sqrtf(1.0f/3.0f) * sinA}, - {1.0f/3.0f * (1.0f - cosA) + sqrtf(1.0f/3.0f) * sinA, cosA + 1.0f/3.0f*(1.0f - cosA), 1.0f/3.0f * (1.0f - cosA) - sqrtf(1.0f/3.0f) * sinA}, - {1.0f/3.0f * (1.0f - cosA) - sqrtf(1.0f/3.0f) * sinA, 1.0f/3.0f * (1.0f - cosA) + sqrtf(1.0f/3.0f) * sinA, cosA + 1.0f/3.0f * (1.0f - cosA)}}; + float matrix[3] = { + cosA + (1.0f - cosA) / 3.0f, + 1.0f/3.0f * (1.0f - cosA) - sqrtf(1.0f/3.0f) * sinA, + 1.0f/3.0f * (1.0f - cosA) + sqrtf(1.0f/3.0f) * sinA + }; // Loop through pixels unsigned char *pixels = (unsigned char *) frame_image->bits(); - for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) + + #pragma omp parallel for shared (pixels) + for (int pixel = 0; pixel < pixel_count; ++pixel) { - // Get the RGB values from the pixel - int R = pixels[byte_index]; - int G = pixels[byte_index + 1]; - int B = pixels[byte_index + 2]; - int A = pixels[byte_index + 3]; + // Get the RGB values from the pixel (ignore the alpha channel) + int R = pixels[pixel * 4]; + int G = pixels[pixel * 4 + 1]; + int B = pixels[pixel * 4 + 2]; // Multiply each color by the hue rotation matrix - float rx = constrain(R * matrix[0][0] + G * matrix[0][1] + B * matrix[0][2]); - float gx = constrain(R * matrix[1][0] + G * matrix[1][1] + B * matrix[1][2]); - float bx = constrain(R * matrix[2][0] + G * matrix[2][1] + B * matrix[2][2]); - - // Set all pixels to new value - pixels[byte_index] = rx; - pixels[byte_index + 1] = gx; - pixels[byte_index + 2] = bx; - pixels[byte_index + 3] = A; // leave the alpha value alone + pixels[pixel * 4] = constrain(R * matrix[0] + G * matrix[1] + B * matrix[2]); + pixels[pixel * 4 + 1] = constrain(R * matrix[2] + G * matrix[0] + B * matrix[1]); + pixels[pixel * 4 + 2] = constrain(R * matrix[1] + G * matrix[2] + B * matrix[0]); } // return the modified frame @@ -100,14 +102,14 @@ } // Generate JSON string of this object -string Hue::Json() { +std::string Hue::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Hue::JsonValue() { +// Generate Json::Value for this object +Json::Value Hue::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -119,30 +121,24 @@ } // Load JSON string into this object -void Hue::SetJson(string value) { +void Hue::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match 
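/* The Hue rewrite above shrinks the 3x3 rotation matrix to three floats because, as the replaced
 * matrix shows, the RGB hue-rotation matrix is circulant: every row is a rotation of (m0, m1, m2),
 * so R' = m0*R + m1*G + m2*B, G' = m2*R + m0*G + m1*B, B' = m1*R + m2*G + m0*B. Per-pixel sketch of
 * that step (Rgb and the function name are illustrative): */
#include <algorithm>
#include <cmath>
#include <cstdint>

struct Rgb { uint8_t r, g, b; };

Rgb rotate_hue(Rgb in, float degrees) {
    const float a = degrees * 3.14159265f / 180.0f;
    const float c = std::cos(a), s = std::sin(a);
    const float m0 = c + (1.0f - c) / 3.0f;
    const float m1 = (1.0f - c) / 3.0f - std::sqrt(1.0f / 3.0f) * s;
    const float m2 = (1.0f - c) / 3.0f + std::sqrt(1.0f / 3.0f) * s;
    const auto clamp = [](float v) { return static_cast<uint8_t>(std::min(255.0f, std::max(0.0f, v))); };
    return {
        clamp(in.r * m0 + in.g * m1 + in.b * m2),   // row (m0, m1, m2)
        clamp(in.r * m2 + in.g * m0 + in.b * m1),   // row (m2, m0, m1)
        clamp(in.r * m1 + in.g * m2 + in.b * m0),   // row (m1, m2, m0)
    };
}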
SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Hue::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Hue::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -153,7 +149,7 @@ } // Get all properties for a specific frame -string Hue::PropertiesJSON(int64_t requested_frame) { +std::string Hue::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -170,4 +166,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Mask.cpp libopenshot-0.2.5+dfsg1/src/effects/Mask.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Mask.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Mask.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Mask class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -147,14 +150,14 @@ } // Generate JSON string of this object -string Mask::Json() { +std::string Mask::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Mask::JsonValue() { +// Generate Json::Value for this object +Json::Value Mask::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -172,30 +175,24 @@ } // Load JSON string into this object -void Mask::SetJson(string value) { +void Mask::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Mask::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Mask::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -225,7 +222,7 @@ } // Create new reader (and load properties) - string type = root["reader"]["type"].asString(); + std::string type = root["reader"]["type"].asString(); if (type == "FFmpegReader") { @@ -234,11 +231,11 @@ reader->SetJsonValue(root["reader"]); #ifdef USE_IMAGEMAGICK - } else if (type == "ImageReader") { + } else if (type == "ImageReader") { - // Create new reader - reader = new ImageReader(root["reader"]["path"].asString()); - reader->SetJsonValue(root["reader"]); + // Create new reader + reader = new ImageReader(root["reader"]["path"].asString()); + reader->SetJsonValue(root["reader"]); #endif } else if 
(type == "QtImageReader") { @@ -262,7 +259,7 @@ } // Get all properties for a specific frame -string Mask::PropertiesJSON(int64_t requested_frame) { +std::string Mask::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -290,4 +287,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Negate.cpp libopenshot-0.2.5+dfsg1/src/effects/Negate.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Negate.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Negate.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Negate class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -55,14 +58,14 @@ } // Generate JSON string of this object -string Negate::Json() { +std::string Negate::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Negate::JsonValue() { +// Generate Json::Value for this object +Json::Value Negate::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -73,30 +76,24 @@ } // Load JSON string into this object -void Negate::SetJson(string value) { +void Negate::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Negate::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Negate::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -104,7 +101,7 @@ } // Get all properties for a specific frame -string Negate::PropertiesJSON(int64_t requested_frame) { +std::string Negate::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Pixelate.cpp libopenshot-0.2.5+dfsg1/src/effects/Pixelate.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Pixelate.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Pixelate.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Pixelate effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -65,41 +68,27 @@ std::shared_ptr frame_image = frame->GetImage(); // Get current keyframe values - double pixelization_value = 1.0 - min(fabs(pixelization.GetValue(frame_number)), 1.0); + double pixelization_value = 1.0 - std::min(fabs(pixelization.GetValue(frame_number)), 1.0); double left_value = left.GetValue(frame_number); double top_value = top.GetValue(frame_number); double right_value = right.GetValue(frame_number); double bottom_value = bottom.GetValue(frame_number); if (pixelization_value > 0.0) { - // Resize frame image smaller (based on pixelization value) - std::shared_ptr smaller_frame_image = std::shared_ptr(new QImage(frame_image->scaledToWidth(max(frame_image->width() * pixelization_value, 2.0), Qt::SmoothTransformation))); - - // Resize image back to original size (with no smoothing to create pixelated image) - std::shared_ptr pixelated_image = std::shared_ptr(new QImage(smaller_frame_image->scaledToWidth(frame_image->width(), Qt::FastTransformation).convertToFormat(QImage::Format_RGBA8888))); + int w = frame_image->width(); + int h = frame_image->height(); - // Get pixel array pointer - unsigned char *pixels = (unsigned char *) frame_image->bits(); - unsigned char *pixelated_pixels = (unsigned char *) pixelated_image->bits(); - - // Get pixels sizes of all margins - int top_bar_height = top_value * frame_image->height(); - int bottom_bar_height = bottom_value * frame_image->height(); - int left_bar_width = left_value * frame_image->width(); - int right_bar_width = right_value * frame_image->width(); - - // Loop through rows - for (int row = 0; row < frame_image->height(); row++) { - - // Copy pixelated pixels into original frame image (where needed) - if ((row >= top_bar_height) && (row <= frame_image->height() - bottom_bar_height)) { - memcpy(&pixels[(row * frame_image->width() + left_bar_width) * 4], &pixelated_pixels[(row * frame_image->width() + left_bar_width) * 4], sizeof(char) * (frame_image->width() - left_bar_width - right_bar_width) * 4); - } - } - - // Cleanup temp images - smaller_frame_image.reset(); - pixelated_image.reset(); + // Define area we're working on in terms of a QRect with QMargins applied + QRect area(QPoint(0,0), frame_image->size()); + area = area.marginsRemoved({int(left_value * w), int(top_value * h), int(right_value * w), int(bottom_value * h)}); + + // Copy and scale pixels in area to be pixelated + auto frame_scaled = frame_image->copy(area).scaledToWidth(area.width() * pixelization_value, Qt::SmoothTransformation); + + // Draw pixelated image back over original + QPainter painter(frame_image.get()); + painter.drawImage(area, frame_scaled); + painter.end(); } // return the modified frame @@ -107,14 +96,14 @@ } // Generate JSON string of this object -string Pixelate::Json() { +std::string Pixelate::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Pixelate::JsonValue() { +// Generate Json::Value for this object +Json::Value Pixelate::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -130,30 +119,24 @@ } // Load JSON string into this object -void Pixelate::SetJson(string value) { +void Pixelate::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success 
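/* The Pixelate rewrite above drops the manual per-row memcpy: it builds the working region with
 * QRect::marginsRemoved(), copies and downscales that region with scaledToWidth(...,
 * Qt::SmoothTransformation), then lets QPainter::drawImage(area, scaled) stretch the shrunken copy
 * back over the original rectangle, which supplies the blocky upscale without a second explicit
 * scaling pass. Sketch of the same pattern on a standalone QImage (Qt 5 API; function name and the
 * minimum width of 2 are choices made for this sketch): */
#include <algorithm>
#include <QImage>
#include <QMargins>
#include <QPainter>
#include <QRect>

// strength in (0, 1]; smaller values give larger blocks.
void pixelate_area(QImage& image, const QMargins& margins, double strength) {
    QRect area(QPoint(0, 0), image.size());
    area = area.marginsRemoved(margins);
    if (area.isEmpty() || strength <= 0.0)
        return;
    const int target_w = std::max(2, static_cast<int>(area.width() * strength));
    const QImage shrunk = image.copy(area).scaledToWidth(target_w, Qt::SmoothTransformation);
    QPainter painter(&image);
    painter.drawImage(area, shrunk);   // drawImage scales `shrunk` back up to fill `area`
}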
= reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Pixelate::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Pixelate::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -172,7 +155,7 @@ } // Get all properties for a specific frame -string Pixelate::PropertiesJSON(int64_t requested_frame) { +std::string Pixelate::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -193,4 +176,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Saturation.cpp libopenshot-0.2.5+dfsg1/src/effects/Saturation.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Saturation.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Saturation.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Saturation class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -66,44 +69,36 @@ if (!frame_image) return frame; + int pixel_count = frame_image->width() * frame_image->height(); + // Get keyframe values for this frame float saturation_value = saturation.GetValue(frame_number); // Constants used for color saturation formula - double pR = .299; - double pG = .587; - double pB = .114; + const double pR = .299; + const double pG = .587; + const double pB = .114; // Loop through pixels unsigned char *pixels = (unsigned char *) frame_image->bits(); - for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) + + #pragma omp parallel for shared (pixels) + for (int pixel = 0; pixel < pixel_count; ++pixel) { // Get the RGB values from the pixel - int R = pixels[byte_index]; - int G = pixels[byte_index + 1]; - int B = pixels[byte_index + 2]; - int A = pixels[byte_index + 3]; + int R = pixels[pixel * 4]; + int G = pixels[pixel * 4 + 1]; + int B = pixels[pixel * 4 + 2]; // Calculate the saturation multiplier double p = sqrt( (R * R * pR) + - (G * G * pG) + - (B * B * pB) ); + (G * G * pG) + + (B * B * pB) ); - // Adjust the saturation - R = p + (R - p) * saturation_value; - G = p + (G - p) * saturation_value; - B = p + (B - p) * saturation_value; - - // Constrain the value from 0 to 255 - R = constrain(R); - G = constrain(G); - B = constrain(B); - - // Set all pixels to new value - pixels[byte_index] = R; - pixels[byte_index + 1] = G; - pixels[byte_index + 2] = B; - pixels[byte_index + 3] = A; // leave the alpha value alone + // Apply adjusted and constrained saturation + pixels[pixel * 4] = constrain(p + (R - p) * saturation_value); + pixels[pixel * 4 + 1] = constrain(p + (G - p) * saturation_value); + pixels[pixel * 4 + 2] = constrain(p + (B - p) * 
saturation_value); } // return the modified frame @@ -111,14 +106,14 @@ } // Generate JSON string of this object -string Saturation::Json() { +std::string Saturation::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Saturation::JsonValue() { +// Generate Json::Value for this object +Json::Value Saturation::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -130,30 +125,24 @@ } // Load JSON string into this object -void Saturation::SetJson(string value) { +void Saturation::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Saturation::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Saturation::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -164,7 +153,7 @@ } // Get all properties for a specific frame -string Saturation::PropertiesJSON(int64_t requested_frame) { +std::string Saturation::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -181,4 +170,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Shift.cpp libopenshot-0.2.5+dfsg1/src/effects/Shift.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Shift.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Shift.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Shift effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
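/* The Saturation loop above computes a per-pixel grey point p = sqrt(0.299*R^2 + 0.587*G^2 + 0.114*B^2)
 * (Rec. 601 luma weights) and then interpolates each channel between p and its original value:
 * out = p + (v - p) * s, so s = 0 gives greyscale, s = 1 leaves the pixel unchanged, and s > 1
 * oversaturates. Per-pixel sketch on an interleaved RGBA quad (function name is illustrative): */
#include <algorithm>
#include <cmath>
#include <cstdint>

void saturate_pixel(uint8_t* px, float s) {          // px points at interleaved R, G, B, A
    const float R = px[0], G = px[1], B = px[2];
    const float p = std::sqrt(R * R * 0.299f + G * G * 0.587f + B * B * 0.114f);
    const auto clamp = [](float v) { return static_cast<uint8_t>(std::min(255.0f, std::max(0.0f, v))); };
    px[0] = clamp(p + (R - p) * s);
    px[1] = clamp(p + (G - p) * s);
    px[2] = clamp(p + (B - p) * s);                  // alpha (px[3]) is left untouched
}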
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -66,9 +69,9 @@ // Get the current shift amount, and clamp to range (-1 to 1 range) double x_shift = x.GetValue(frame_number); - double x_shift_limit = fmod(abs(x_shift), 1.0); + double x_shift_limit = fmod(fabs(x_shift), 1.0); double y_shift = y.GetValue(frame_number); - double y_shift_limit = fmod(abs(y_shift), 1.0); + double y_shift_limit = fmod(fabs(y_shift), 1.0); // Declare temp arrays to hold pixels while we move things around unsigned char *temp_row = new unsigned char[frame_image->width() * 4](); @@ -130,14 +133,14 @@ } // Generate JSON string of this object -string Shift::Json() { +std::string Shift::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Shift::JsonValue() { +// Generate Json::Value for this object +Json::Value Shift::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ -150,30 +153,24 @@ } // Load JSON string into this object -void Shift::SetJson(string value) { +void Shift::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Shift::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Shift::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -186,7 +183,7 @@ } // Get all properties for a specific frame -string Shift::PropertiesJSON(int64_t requested_frame) { +std::string Shift::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -204,4 +201,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/effects/Wave.cpp libopenshot-0.2.5+dfsg1/src/effects/Wave.cpp --- libopenshot-0.2.2+dfsg1/src/effects/Wave.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/effects/Wave.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Wave effect class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
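
Pixelate, Saturation and Shift (and Wave, below) all swap the deprecated Json::Reader boilerplate for a shared openshot::stringToJson() call plus a catch by const reference. The helper's implementation is not part of this diff; the function below is a hypothetical stand-in showing how such a helper can be written with jsoncpp's CharReaderBuilder API, with std::invalid_argument substituting for the library's InvalidJSON exception.

#include <json/json.h>
#include <memory>
#include <stdexcept>
#include <string>

// Hypothetical stand-in for openshot::stringToJson(): parse a string with the
// non-deprecated jsoncpp CharReader API and throw on malformed input.
Json::Value parse_json_or_throw(const std::string &value) {
    Json::CharReaderBuilder builder;
    std::unique_ptr<Json::CharReader> reader(builder.newCharReader());

    Json::Value root;
    std::string errors;
    bool success = reader->parse(value.c_str(), value.c_str() + value.size(), &root, &errors);
    if (!success)
        throw std::invalid_argument("JSON could not be parsed (or is invalid): " + errors);
    return root;
}

With a helper like this, each patched SetJson() reduces to parsing once and delegating to SetJsonValue(), re-throwing InvalidJSON only when keys or data types are wrong.
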
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -65,15 +68,13 @@ // Get the frame's image std::shared_ptr frame_image = frame->GetImage(); - // Get pixels for frame image + // Get original pixels for frame image, and also make a copy for editing + const unsigned char *original_pixels = (unsigned char *) frame_image->constBits(); unsigned char *pixels = (unsigned char *) frame_image->bits(); - - // Make temp copy of pixels before we start changing them - unsigned char *temp_image = new unsigned char[frame_image->width() * frame_image->height() * 4](); - memcpy(temp_image, pixels, sizeof(char) * frame_image->width() * frame_image->height() * 4); + int pixel_count = frame_image->width() * frame_image->height(); // Get current keyframe values - double time = frame_number;//abs(((frame_number + 255) % 510) - 255); + double time = frame_number; double wavelength_value = wavelength.GetValue(frame_number); double amplitude_value = amplitude.GetValue(frame_number); double multiplier_value = multiplier.GetValue(frame_number); @@ -81,43 +82,41 @@ double speed_y_value = speed_y.GetValue(frame_number); // Loop through pixels - for (int pixel = 0, byte_index=0; pixel < frame_image->width() * frame_image->height(); pixel++, byte_index+=4) + #pragma omp parallel for + for (int pixel = 0; pixel < pixel_count; ++pixel) { - // Calculate X and Y pixel coordinates + // Calculate pixel Y value int Y = pixel / frame_image->width(); // Calculate wave pixel offsets - float noiseVal = (100 + Y * 0.001) * multiplier_value; // Time and time multiplier (to make the wave move) - float noiseAmp = noiseVal * amplitude_value; // Apply amplitude / height of the wave - float waveformVal = sin((Y * wavelength_value) + (time * speed_y_value)); // Waveform algorithm on y-axis - float waveVal = (waveformVal + shift_x_value) * noiseAmp; // Shifts pixels on the x-axis - - int source_X = round(pixel + waveVal) * 4; - if (source_X < 0) - source_X = 0; - if (source_X > frame_image->width() * frame_image->height() * 4 * sizeof(char)) - source_X = (frame_image->width() * frame_image->height() * 4 * sizeof(char)) - (sizeof(char) * 4); + float noiseVal = (100 + Y * 0.001) * multiplier_value; // Time and time multiplier (to make the wave move) + float noiseAmp = noiseVal * amplitude_value; // Apply amplitude / height of the wave + float waveformVal = sin((Y * wavelength_value) + (time * speed_y_value)); // Waveform algorithm on y-axis + float waveVal = (waveformVal + shift_x_value) * noiseAmp; // Shifts pixels on the x-axis + + long unsigned int source_px = round(pixel + waveVal); + if (source_px < 0) + source_px = 0; + if (source_px >= pixel_count) + source_px = pixel_count - 1; // Calculate source array location, and target array location, and copy the 4 color values - memcpy(&pixels[byte_index], &temp_image[source_X], sizeof(char) * 4); + memcpy(&pixels[pixel * 4], &original_pixels[source_px * 4], sizeof(char) * 4); } - // Delete arrays - delete[] temp_image; - // return the modified frame return frame; } // Generate JSON string of this object -string Wave::Json() { +std::string Wave::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Wave::JsonValue() { +// Generate Json::Value for this object +Json::Value Wave::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties @@ 
-133,30 +132,24 @@ } // Load JSON string into this object -void Wave::SetJson(string value) { +void Wave::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Wave::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Wave::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); @@ -175,7 +168,7 @@ } // Get all properties for a specific frame -string Wave::PropertiesJSON(int64_t requested_frame) { +std::string Wave::PropertiesJSON(int64_t requested_frame) const { // Generate JSON properties list Json::Value root; @@ -196,4 +189,3 @@ // Return formatted string return root.toStyledString(); } - diff -Nru libopenshot-0.2.2+dfsg1/src/examples/ExampleBlackmagic.cpp libopenshot-0.2.5+dfsg1/src/examples/ExampleBlackmagic.cpp --- libopenshot-0.2.2+dfsg1/src/examples/ExampleBlackmagic.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/examples/ExampleBlackmagic.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Main_Blackmagic class (live greenscreen example app) * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/examples/Example.cpp libopenshot-0.2.5+dfsg1/src/examples/Example.cpp --- libopenshot-0.2.2+dfsg1/src/examples/Example.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/examples/Example.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Example Executable (example app for libopenshot) * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -36,12 +39,19 @@ int main(int argc, char* argv[]) { - FFmpegReader r9("/home/jonathan/Videos/sintel_trailer-720p.mp4"); + Settings *s = Settings::Instance(); + s->HARDWARE_DECODER = 2; // 1 VA-API, 2 NVDEC, 6 VDPAU + s->HW_DE_DEVICE_SET = 0; + + std::string input_filepath = TEST_MEDIA_PATH; + input_filepath += "sintel_trailer-720p.mp4"; + + FFmpegReader r9(input_filepath); r9.Open(); r9.DisplayInfo(); /* WRITER ---------------- */ - FFmpegWriter w9("/home/jonathan/metadata.mp4"); + FFmpegWriter w9("metadata.mp4"); // Set options w9.SetAudioOptions(true, "libmp3lame", r9.info.sample_rate, r9.info.channels, r9.info.channel_layout, 128000); @@ -73,7 +83,7 @@ // Close timeline r9.Close(); - cout << "Completed successfully!" 
<< endl; + std::cout << "Completed successfully!" << std::endl; return 0; -} \ No newline at end of file +} diff -Nru libopenshot-0.2.2+dfsg1/src/examples/ExampleHtml.cpp libopenshot-0.2.5+dfsg1/src/examples/ExampleHtml.cpp --- libopenshot-0.2.2+dfsg1/src/examples/ExampleHtml.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/examples/ExampleHtml.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,103 @@ +/** + * @file + * @brief Source file for QtHtmlReader Example (example app for libopenshot) + * @author Jonathan Thomas + * @author FeRD (Frank Dana) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include +#include +#include +#include "../../include/OpenShot.h" +//#include "../../include/CrashHandler.h" +#include +#include + +using namespace openshot; + +int main(int argc, char* argv[]) { + + QGuiApplication app(argc, argv); + + std::string html_code = R"html(

Check out this HTML!

)html"; + + std::string css_code = R"css( + * {font-family:sans-serif; font-size:18pt; color:#ffffff;} + #red {color: #ff0000;} + )css"; + +// Create a reader to generate an openshot::Frame containing text +QtHtmlReader r(1280, // width + 720, // height + -16, // x_offset + -16, // y_offset + GRAVITY_BOTTOM_RIGHT, // gravity + html_code, // html + css_code, // css + "#000000" // background_color + ); + + r.Open(); // Open the reader + + r.DisplayInfo(); + + /* WRITER ---------------- */ + FFmpegWriter w("cppHtmlExample.mp4"); + + // Set options + //w.SetAudioOptions(true, "libmp3lame", r.info.sample_rate, r.info.channels, r.info.channel_layout, 128000); + w.SetVideoOptions(true, "libx264", Fraction(30000, 1000), 1280, 720, Fraction(1, 1), false, false, 3000000); + + w.info.metadata["title"] = "testtest"; + w.info.metadata["artist"] = "aaa"; + w.info.metadata["album"] = "bbb"; + w.info.metadata["year"] = "2015"; + w.info.metadata["description"] = "ddd"; + w.info.metadata["comment"] = "eee"; + w.info.metadata["comment"] = "comment"; + w.info.metadata["copyright"] = "copyright OpenShot!"; + + // Open writer + w.Open(); + + for (long int frame = 1; frame <= 100; ++frame) + { + std::shared_ptr f = r.GetFrame(frame); // Same frame every time + w.WriteFrame(f); + } + + // Close writer & reader + w.Close(); + r.Close(); + + // Set a timer with 0 timeout to terminate immediately after + // processing events + QTimer::singleShot(0, &app, SLOT(quit())); + + // Run QGuiApplication to completion + return app.exec(); +} diff -Nru libopenshot-0.2.2+dfsg1/src/examples/ExampleHtml.py libopenshot-0.2.5+dfsg1/src/examples/ExampleHtml.py --- libopenshot-0.2.2+dfsg1/src/examples/ExampleHtml.py 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/examples/ExampleHtml.py 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 + +""" + @file + @brief Python source file for QtHtmlReader example + @author Jonathan Thomas + @author FeRD (Frank Dana) + + @ref License +""" + +# LICENSE +# +# Copyright (c) 2008-2019 OpenShot Studios, LLC +# . This file is part of +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the +# world. For more information visit . +# +# OpenShot Library (libopenshot) is free software: you can redistribute it +# and/or modify it under the terms of the GNU Lesser General Public License +# as published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# OpenShot Library (libopenshot) is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with OpenShot Library. If not, see . + +import sys +from PyQt5.QtCore import QTimer +from PyQt5.QtGui import QGuiApplication +import openshot + +app = QGuiApplication(sys.argv) + +html_code = """

Check out this HTML!

""" + +css_code = """ + * {font-family:sans-serif; font-size:18pt; color:#ffffff;} + #red {color: #ff0000;} +""" + +# Create a QtHtmlReader +r = openshot.QtHtmlReader(1280, # width + 720, # height + -16, # x offset + -16, # y offset + openshot.GRAVITY_BOTTOM_RIGHT, + html_code, + css_code, + "#000000" # background color + ) + +r.Open() # Open the reader + +r.DisplayInfo() # Display metadata + +# Set up Writer +w = openshot.FFmpegWriter("pyHtmlExample.mp4") + +w.SetVideoOptions(True, "libx264", openshot.Fraction(30000, 1000), 1280, 720, + openshot.Fraction(1, 1), False, False, 3000000) + +w.info.metadata["title"] = "testtest" +w.info.metadata["artist"] = "aaa" +w.info.metadata["album"] = "bbb" +w.info.metadata["year"] = "2015" +w.info.metadata["description"] = "ddd" +w.info.metadata["comment"] = "eee" +w.info.metadata["comment"] = "comment" +w.info.metadata["copyright"] = "copyright OpenShot!" + +# Open the Writer +w.Open() + +# Grab 30 frames from Reader and encode to Writer +for frame in range(100): + f = r.GetFrame(frame) + w.WriteFrame(f) + +# Close out Reader & Writer +w.Close() +r.Close() + +# Set a timer to terminate the app as soon as the event queue empties +QTimer.singleShot(0, app.quit) + +# Run QGuiApplication to completion +sys.exit(app.exec()) diff -Nru libopenshot-0.2.2+dfsg1/src/examples/Example.py libopenshot-0.2.5+dfsg1/src/examples/Example.py --- libopenshot-0.2.2+dfsg1/src/examples/Example.py 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/examples/Example.py 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 + +""" + @file + @brief Python source file for openshot.py example + @author Jonathan Thomas + @author FeRD (Frank Dana) + + @ref License +""" + +# LICENSE +# +# Copyright (c) 2008-2019 OpenShot Studios, LLC +# . This file is part of +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the +# world. For more information visit . +# +# OpenShot Library (libopenshot) is free software: you can redistribute it +# and/or modify it under the terms of the GNU Lesser General Public License +# as published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# OpenShot Library (libopenshot) is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with OpenShot Library. If not, see . + +# This can be run against an uninstalled build of libopenshot, just set the +# environment variable PYTHONPATH to the location of the Python bindings. 
+# +# For example: +# $ PYTHONPATH=../../build/src/bindings/python python3 Example.py +# +import openshot + + +# Create an FFmpegReader +r = openshot.FFmpegReader("sintel_trailer-720p.mp4") + +r.Open() # Open the reader +r.DisplayInfo() # Display metadata + +# Set up Writer +w = openshot.FFmpegWriter("pythonExample.mp4") + +w.SetAudioOptions(True, "libmp3lame", r.info.sample_rate, r.info.channels, r.info.channel_layout, 128000) +w.SetVideoOptions(True, "libx264", openshot.Fraction(30000, 1000), 1280, 720, + openshot.Fraction(1, 1), False, False, 3000000) + +w.info.metadata["title"] = "testtest" +w.info.metadata["artist"] = "aaa" +w.info.metadata["album"] = "bbb" +w.info.metadata["year"] = "2015" +w.info.metadata["description"] = "ddd" +w.info.metadata["comment"] = "eee" +w.info.metadata["comment"] = "comment" +w.info.metadata["copyright"] = "copyright OpenShot!" + +# Open the Writer +w.Open() + +# Grab 30 frames from Reader and encode to Writer +for frame in range(100): + f = r.GetFrame(frame) + w.WriteFrame(f) + +# Close out Reader & Writer +w.Close() +r.Close() + +print("Completed successfully!") diff -Nru "/tmp/tmpB8oP61/DPojWwIASv/libopenshot-0.2.2+dfsg1/src/examples/OpenShot Wipe Tests.py" "/tmp/tmpB8oP61/n7jS_zyRsr/libopenshot-0.2.5+dfsg1/src/examples/OpenShot Wipe Tests.py" --- "/tmp/tmpB8oP61/DPojWwIASv/libopenshot-0.2.2+dfsg1/src/examples/OpenShot Wipe Tests.py" 2018-09-22 19:47:46.000000000 +0000 +++ "/tmp/tmpB8oP61/n7jS_zyRsr/libopenshot-0.2.5+dfsg1/src/examples/OpenShot Wipe Tests.py" 2020-03-03 08:00:06.000000000 +0000 @@ -1,16 +1,17 @@ import openshot -# Create a empty clip -t = openshot.Timeline(720, 480, openshot.Fraction(24,1), 44100, 2) +# Create an empty timeline +t = openshot.Timeline(720, 480, openshot.Fraction(24,1), 44100, 2, openshot.LAYOUT_STEREO) +t.Open() # lower layer -lower = openshot.ImageReader("/home/jonathan/apps/libopenshot/src/examples/back.png") +lower = openshot.QtImageReader("back.png") c1 = openshot.Clip(lower) c1.Layer(1) t.AddClip(c1) # higher layer -higher = openshot.ImageReader("/home/jonathan/apps/libopenshot/src/examples/front3.png") +higher = openshot.QtImageReader("front3.png") c2 = openshot.Clip(higher) c2.Layer(2) #c2.alpha = openshot.Keyframe(0.5) @@ -18,25 +19,26 @@ # Wipe / Transition brightness = openshot.Keyframe() -brightness.AddPoint(1, 100.0, openshot.BEZIER) -brightness.AddPoint(24, -100.0, openshot.BEZIER) +brightness.AddPoint(1, 1.0, openshot.BEZIER) +brightness.AddPoint(24, -1.0, openshot.BEZIER) contrast = openshot.Keyframe() contrast.AddPoint(1, 20.0, openshot.BEZIER) contrast.AddPoint(24, 20.0, openshot.BEZIER) -e = openshot.Wipe("/home/jonathan/apps/libopenshot/src/examples/mask.png", brightness, contrast) +reader = openshot.QtImageReader("mask.png") +e = openshot.Mask(reader, brightness, contrast) e.Layer(2) e.End(60) t.AddEffect(e) -e1 = openshot.Wipe("/home/jonathan/apps/libopenshot/src/examples/mask2.png", brightness, contrast) +reader1 = openshot.QtImageReader("mask2.png") +e1 = openshot.Mask(reader1, brightness, contrast) e1.Layer(2) e1.Order(2) e1.End(60) #t.AddEffect(e1) - for n in range(1,25): - print n + print(n, end=" ", flush=1) t.GetFrame(n).Save("%s.png" % n, 1.0) diff -Nru libopenshot-0.2.2+dfsg1/src/FFmpegReader.cpp libopenshot-0.2.5+dfsg1/src/FFmpegReader.cpp --- libopenshot-0.2.2+dfsg1/src/FFmpegReader.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/FFmpegReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for FFmpegReader class * @author Jonathan 
Thomas , Fabrice Bellard * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2013 OpenShot Studios, LLC, Fabrice Bellard + * Copyright (c) 2008-2019 OpenShot Studios, LLC, Fabrice Bellard * (http://www.openshotstudios.com). This file is part of * OpenShot Library (http://www.openshot.org), an open-source project * dedicated to delivering high quality video editing and animation solutions @@ -30,15 +33,63 @@ #include "../include/FFmpegReader.h" +#define ENABLE_VAAPI 0 + +#if HAVE_HW_ACCEL +#pragma message "You are compiling with experimental hardware decode" +#else +#pragma message "You are compiling only with software decode" +#endif + +#if HAVE_HW_ACCEL +#define MAX_SUPPORTED_WIDTH 1950 +#define MAX_SUPPORTED_HEIGHT 1100 + +#if ENABLE_VAAPI +#include "libavutil/hwcontext_vaapi.h" + +typedef struct VAAPIDecodeContext { + VAProfile va_profile; + VAEntrypoint va_entrypoint; + VAConfigID va_config; + VAContextID va_context; + +#if FF_API_STRUCT_VAAPI_CONTEXT + // FF_DISABLE_DEPRECATION_WARNINGS + int have_old_context; + struct vaapi_context *old_context; + AVBufferRef *device_ref; + // FF_ENABLE_DEPRECATION_WARNINGS +#endif + + AVHWDeviceContext *device; + AVVAAPIDeviceContext *hwctx; + + AVHWFramesContext *frames; + AVVAAPIFramesContext *hwfc; + + enum AVPixelFormat surface_format; + int surface_count; + } VAAPIDecodeContext; +#endif // ENABLE_VAAPI +#endif // HAVE_HW_ACCEL + + using namespace openshot; -FFmpegReader::FFmpegReader(string path) - : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), - audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), - check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), - prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), - packet(NULL), use_omp_threads(true) { +int hw_de_on = 0; +#if HAVE_HW_ACCEL + AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE; + AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE; +#endif + +FFmpegReader::FFmpegReader(std::string path) + : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), + audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), + check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), + prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL) { // Initialize FFMpeg, and register all formats and codecs AV_REGISTER_ALL @@ -49,18 +100,18 @@ missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) 
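
The new hw_de_on global and the Settings fields read here are how callers opt in to the hardware decode path, exactly as the updated Example.cpp earlier in this diff does. A minimal usage sketch, with an illustrative file name and include path:

#include "OpenShot.h"   // adjust the include path to your build; illustrative

int main() {
    // Select a hardware decoder before any FFmpegReader is opened.
    // Values used by Example.cpp above: 1 = VA-API, 2 = NVDEC, 6 = VDPAU; 0 = software only.
    openshot::Settings *s = openshot::Settings::Instance();
    s->HARDWARE_DECODER = 1;   // try VA-API
    s->HW_DE_DEVICE_SET = 0;   // first device/render node

    openshot::FFmpegReader r("input.mp4");   // path is a placeholder
    r.Open();
    r.DisplayInfo();   // decoding falls back to software if the hardware setup fails
    r.Close();
    return 0;
}
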
Open(); Close(); } -FFmpegReader::FFmpegReader(string path, bool inspect_reader) +FFmpegReader::FFmpegReader(std::string path, bool inspect_reader) : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), - packet(NULL), use_omp_threads(true) { + packet(NULL) { // Initialize FFMpeg, and register all formats and codecs AV_REGISTER_ALL @@ -71,7 +122,7 @@ missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) if (inspect_reader) { Open(); Close(); @@ -85,8 +136,7 @@ } // This struct holds the associated video frame and starting sample # for an audio packet. -bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64_t amount) -{ +bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64_t amount) { // Is frame even close to this one? if (abs(location.frame - frame) >= 2) // This is too far away to be considered @@ -103,13 +153,97 @@ return false; } -void FFmpegReader::Open() +#if HAVE_HW_ACCEL + +// Get hardware pix format +static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { +#if defined(__linux__) + // Linux pix formats + case AV_PIX_FMT_VAAPI: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; + return *p; + break; + case AV_PIX_FMT_VDPAU: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VDPAU; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VDPAU; + return *p; + break; +#endif +#if defined(_WIN32) + // Windows pix formats + case AV_PIX_FMT_DXVA2_VLD: + hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; + return *p; + break; + case AV_PIX_FMT_D3D11: + hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; + return *p; + break; +#endif +#if defined(__APPLE__) + // Apple pix formats + case AV_PIX_FMT_VIDEOTOOLBOX: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VIDEOTOOLBOX; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + return *p; + break; +#endif + // Cross-platform pix formats + case AV_PIX_FMT_CUDA: + hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; + return *p; + break; + case AV_PIX_FMT_QSV: + hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; + return *p; + break; + default: + // This is only here to silence unused-enum warnings + break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format (Unable to decode this file using hardware decode)"); + return AV_PIX_FMT_NONE; +} + +int 
FFmpegReader::IsHardwareDecodeSupported(int codecid) { + int ret; + switch (codecid) { + case AV_CODEC_ID_H264: + case AV_CODEC_ID_MPEG2VIDEO: + case AV_CODEC_ID_VC1: + case AV_CODEC_ID_WMV1: + case AV_CODEC_ID_WMV2: + case AV_CODEC_ID_WMV3: + ret = 1; + break; + default : + ret = 0; + break; + } + return ret; +} +#endif // HAVE_HW_ACCEL + +void FFmpegReader::Open() { // Open reader if not already open - if (!is_open) - { + if (!is_open) { // Initialize format context pFormatCtx = NULL; + { + hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1); + } // Open video file if (avformat_open_input(&pFormatCtx, path.c_str(), NULL, NULL) != 0) @@ -122,8 +256,7 @@ videoStream = -1; audioStream = -1; // Loop through each stream, and identify the video and audio stream index - for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) - { + for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) { // Is this a video stream? if (AV_GET_CODEC_TYPE(pFormatCtx->streams[i]) == AVMEDIA_TYPE_VIDEO && videoStream < 0) { videoStream = i; @@ -137,8 +270,7 @@ throw NoStreamsFound("No video or audio streams found in this file.", path); // Is there a video stream? - if (videoStream != -1) - { + if (videoStream != -1) { // Set the stream index info.video_stream_index = videoStream; @@ -150,23 +282,232 @@ // Get codec and codec context from stream AVCodec *pCodec = avcodec_find_decoder(codecId); - pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); + AVDictionary *opts = NULL; + int retry_decode_open = 2; + // If hw accel is selected but hardware cannot handle repeat with software decoding + do { + pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); +#if HAVE_HW_ACCEL + if (hw_de_on && (retry_decode_open==2)) { + // Up to here no decision is made if hardware or software decode + hw_de_supported = IsHardwareDecodeSupported(pCodecCtx->codec_id); + } +#endif + retry_decode_open = 0; - // Set number of threads equal to number of processors (not to exceed 16) - pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); + // Set number of threads equal to number of processors (not to exceed 16) + pCodecCtx->thread_count = std::min(FF_NUM_PROCESSORS, 16); - if (pCodec == NULL) { - throw InvalidCodec("A valid video codec could not be found for this file.", path); - } + if (pCodec == NULL) { + throw InvalidCodec("A valid video codec could not be found for this file.", path); + } - // Init options - AVDictionary *opts = NULL; - av_dict_set(&opts, "strict", "experimental", 0); + // Init options + av_dict_set(&opts, "strict", "experimental", 0); +#if HAVE_HW_ACCEL + if (hw_de_on && hw_de_supported) { + // Open Hardware Acceleration + int i_decoder_hw = 0; + char adapter[256]; + char *adapter_ptr = NULL; + int adapter_num; + adapter_num = openshot::Settings::Instance()->HW_DE_DEVICE_SET; + fprintf(stderr, "Hardware decoding device number: %d\n", adapter_num); + + // Set hardware pix format (callback) + pCodecCtx->get_format = get_hw_dec_format; + + if (adapter_num < 3 && adapter_num >=0) { +#if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + adapter_ptr = adapter; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 1: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + break; + case 2: + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + break; + case 6: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VDPAU; + break; + case 7: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + break; + default: + hw_de_av_device_type = 
AV_HWDEVICE_TYPE_VAAPI; + break; + } + +#elif defined(_WIN32) + adapter_ptr = NULL; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 2: + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + break; + case 3: + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + break; + case 4: + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; + break; + case 7: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + break; + } +#elif defined(__APPLE__) + adapter_ptr = NULL; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 5: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + break; + case 7: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + break; + } +#endif + + } else { + adapter_ptr = NULL; // Just to be sure + } + + // Check if it is there and writable +#if defined(__linux__) + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == 0 ) { +#elif defined(_WIN32) + if( adapter_ptr != NULL ) { +#elif defined(__APPLE__) + if( adapter_ptr != NULL ) { +#endif + ZmqLogger::Instance()->AppendDebugMethod("Decode Device present using device"); + } + else { + adapter_ptr = NULL; // use default + ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default"); + } + + hw_device_ctx = NULL; + // Here the first hardware initialisations are made + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device reference create failed.", path); + } + + /* + av_buffer_unref(&ist->hw_frames_ctx); + ist->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ctx); + if (!ist->hw_frames_ctx) { + av_log(avctx, AV_LOG_ERROR, "Error creating a CUDA frames context\n"); + return AVERROR(ENOMEM); + } - // Open video codec - if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) - throw InvalidCodec("A video codec was found, but could not be opened.", path); + frames_ctx = (AVHWFramesContext*)ist->hw_frames_ctx->data; + frames_ctx->format = AV_PIX_FMT_CUDA; + frames_ctx->sw_format = avctx->sw_pix_fmt; + frames_ctx->width = avctx->width; + frames_ctx->height = avctx->height; + + av_log(avctx, AV_LOG_DEBUG, "Initializing CUDA frames context: sw_format = %s, width = %d, height = %d\n", + av_get_pix_fmt_name(frames_ctx->sw_format), frames_ctx->width, frames_ctx->height); + + + ret = av_hwframe_ctx_init(pCodecCtx->hw_device_ctx); + ret = av_hwframe_ctx_init(ist->hw_frames_ctx); + if (ret < 0) { + av_log(avctx, AV_LOG_ERROR, "Error initializing a CUDA frame pool\n"); + return ret; + } + */ + } + else { + throw InvalidCodec("Hardware device create failed.", path); + } + } +#endif // HAVE_HW_ACCEL + + // Open video codec + if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) + throw InvalidCodec("A video codec was found, but could not be opened.", path); + +#if HAVE_HW_ACCEL + if (hw_de_on && hw_de_supported) { + AVHWFramesConstraints *constraints = NULL; + void *hwconfig = NULL; + hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx); + +// TODO: needs va_config! 
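
On Linux the block above builds the render-node path by hand and checks that it is writable before handing it to av_hwdevice_ctx_create(). A condensed, self-contained sketch of that device-attach step, assuming VA-API and ignoring the Windows/macOS branches:

#include <unistd.h>
#include <cstdio>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
}

// Attach a VA-API hardware device to an already-allocated decoder context.
// Returns true on success; on failure the caller should fall back to software decode.
static bool attach_vaapi_device(AVCodecContext *ctx, int adapter_num) {
    // Render nodes are numbered from 128: /dev/dri/renderD128, renderD129, ...
    char adapter[256];
    snprintf(adapter, sizeof(adapter), "/dev/dri/renderD%d", adapter_num + 128);

    // Use the default device if the node is missing or not writable.
    const char *device = (access(adapter, W_OK) == 0) ? adapter : NULL;

    AVBufferRef *hw_device_ctx = NULL;
    if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, device, NULL, 0) < 0)
        return false;

    // The codec context takes its own reference; release the local one afterwards.
    ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
    av_buffer_unref(&hw_device_ctx);
    return ctx->hw_device_ctx != NULL;
}

Unlike the patch, which keeps hw_device_ctx as a member and releases it in Close(), the sketch drops its local reference immediately; the codec context holds its own reference either way.
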
+#if ENABLE_VAAPI + ((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config; + constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,hwconfig); +#endif // ENABLE_VAAPI + if (constraints) { + if (pCodecCtx->coded_width < constraints->min_width || + pCodecCtx->coded_height < constraints->min_height || + pCodecCtx->coded_width > constraints->max_width || + pCodecCtx->coded_height > constraints->max_height) { + ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n"); + hw_de_supported = 0; + retry_decode_open = 1; + AV_FREE_CONTEXT(pCodecCtx); + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } + else { + // All is just peachy + ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Min width :", constraints->min_width, "Min Height :", constraints->min_height, "MaxWidth :", constraints->max_width, "MaxHeight :", constraints->max_height, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height); + retry_decode_open = 0; + } + av_hwframe_constraints_free(&constraints); + if (hwconfig) { + av_freep(&hwconfig); + } + } + else { + int max_h, max_w; + //max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" ))); + max_h = openshot::Settings::Instance()->DE_LIMIT_HEIGHT_MAX; + //max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); + max_w = openshot::Settings::Instance()->DE_LIMIT_WIDTH_MAX; + ZmqLogger::Instance()->AppendDebugMethod("Constraints could not be found using default limit\n"); + //cerr << "Constraints could not be found using default limit\n"; + if (pCodecCtx->coded_width < 0 || + pCodecCtx->coded_height < 0 || + pCodecCtx->coded_width > max_w || + pCodecCtx->coded_height > max_h ) { + ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height); + hw_de_supported = 0; + retry_decode_open = 1; + AV_FREE_CONTEXT(pCodecCtx); + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } + else { + ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height); + retry_decode_open = 0; + } + } + } // if hw_de_on && hw_de_supported + else { + ZmqLogger::Instance()->AppendDebugMethod("\nDecode in software is used\n"); + } +#else + retry_decode_open = 0; +#endif // HAVE_HW_ACCEL + } while (retry_decode_open); // retry_decode_open // Free options av_dict_free(&opts); @@ -175,8 +516,7 @@ } // Is there an audio stream? 
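
The do/while(retry_decode_open) loop that ends above boils down to: open the decoder with hardware attached, and if the coded picture size falls outside the device (or configured) limits, discard the context and reopen in software. A compressed sketch of that control flow, reusing the hypothetical attach_vaapi_device() from the earlier sketch and plain width/height limits in place of the constraints and Settings lookups:

extern "C" {
#include <libavcodec/avcodec.h>
}

bool attach_vaapi_device(AVCodecContext *ctx, int adapter_num);  // from the earlier sketch

// Open a decoder for `codecpar`, preferring hardware; if the coded picture size
// exceeds the given limits, reopen in plain software mode. Returns NULL on failure.
static AVCodecContext *open_decoder_with_fallback(const AVCodecParameters *codecpar,
                                                  int max_w, int max_h) {
    const AVCodec *codec = avcodec_find_decoder(codecpar->codec_id);
    if (!codec)
        return NULL;

    for (int try_hw = 1; try_hw >= 0; --try_hw) {
        AVCodecContext *ctx = avcodec_alloc_context3(codec);
        if (!ctx)
            return NULL;
        avcodec_parameters_to_context(ctx, codecpar);

        if (try_hw && !attach_vaapi_device(ctx, 0)) {
            avcodec_free_context(&ctx);     // no hardware device: retry in software
            continue;
        }
        if (avcodec_open2(ctx, codec, NULL) < 0) {
            avcodec_free_context(&ctx);
            continue;
        }
        if (try_hw && (ctx->coded_width > max_w || ctx->coded_height > max_h)) {
            avcodec_free_context(&ctx);     // too large for the device: retry in software
            continue;
        }
        return ctx;                          // context is usable
    }
    return NULL;
}
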
- if (audioStream != -1) - { + if (audioStream != -1) { // Set the stream index info.audio_stream_index = audioStream; @@ -191,7 +531,7 @@ aCodecCtx = AV_GET_CODEC_CONTEXT(aStream, aCodec); // Set number of threads equal to number of processors (not to exceed 16) - aCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); + aCodecCtx->thread_count = std::min(FF_NUM_PROCESSORS, 16); if (aCodec == NULL) { throw InvalidCodec("A valid audio codec could not be found for this file.", path); @@ -229,32 +569,39 @@ missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); - // Initialize OMP threading support - use_omp_threads = openshot::IsOMPEnabled(); - // Mark as "open" is_open = true; } } -void FFmpegReader::Close() -{ +void FFmpegReader::Close() { // Close all objects, if reader is 'open' - if (is_open) - { + if (is_open) { // Mark as "closed" is_open = false; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close"); + + if (packet) { + // Remove previous packet before getting next one + RemoveAVPacket(packet); + packet = NULL; + } // Close the codec - if (info.has_video) - { + if (info.has_video) { avcodec_flush_buffers(pCodecCtx); AV_FREE_CONTEXT(pCodecCtx); +#if HAVE_HW_ACCEL + if (hw_de_on) { + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } +#endif // HAVE_HW_ACCEL } - if (info.has_audio) - { + if (info.has_audio) { avcodec_flush_buffers(aCodecCtx); AV_FREE_CONTEXT(aCodecCtx); } @@ -266,7 +613,7 @@ // Clear processed lists { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processed_video_frames.clear(); processed_audio_frames.clear(); processing_video_frames.clear(); @@ -289,18 +636,19 @@ seek_video_frame_found = 0; current_video_frame = 0; has_missing_frames = false; + + last_video_frame.reset(); } } -void FFmpegReader::UpdateAudioInfo() -{ +void FFmpegReader::UpdateAudioInfo() { // Set values of FileInfo struct info.has_audio = true; info.file_size = pFormatCtx->pb ? 
avio_size(pFormatCtx->pb) : -1; info.acodec = aCodecCtx->codec->name; info.channels = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels; if (AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout == 0) - AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout = av_get_default_channel_layout( AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels ); + AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout = av_get_default_channel_layout(AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels); info.channel_layout = (ChannelLayout) AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout; info.sample_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->sample_rate; info.audio_bit_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->bit_rate; @@ -314,15 +662,13 @@ info.duration = aStream->duration * info.audio_timebase.ToDouble(); // Check for an invalid video length - if (info.has_video && info.video_length <= 0) - { + if (info.has_video && info.video_length <= 0) { // Calculate the video length from the audio duration info.video_length = info.duration * info.fps.ToDouble(); } // Set video timebase (if no video stream was found) - if (!info.has_video) - { + if (!info.has_video) { // Set a few important default video settings (so audio can be divided into frames) info.fps.num = 24; info.fps.den = 1; @@ -347,8 +693,7 @@ } } -void FFmpegReader::UpdateVideoInfo() -{ +void FFmpegReader::UpdateVideoInfo() { if (check_fps) // Already initialized all the video metadata, no reason to do it again return; @@ -365,18 +710,13 @@ info.fps.num = pStream->avg_frame_rate.num; info.fps.den = pStream->avg_frame_rate.den; - if (pStream->sample_aspect_ratio.num != 0) - { + if (pStream->sample_aspect_ratio.num != 0) { info.pixel_ratio.num = pStream->sample_aspect_ratio.num; info.pixel_ratio.den = pStream->sample_aspect_ratio.den; - } - else if (AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num != 0) - { + } else if (AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num != 0) { info.pixel_ratio.num = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num; info.pixel_ratio.den = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.den; - } - else - { + } else { info.pixel_ratio.num = 1; info.pixel_ratio.den = 1; } @@ -392,6 +732,33 @@ info.display_ratio.num = size.num; info.display_ratio.den = size.den; + // Get scan type and order from codec context/params + if (!check_interlace) { + check_interlace = true; + AVFieldOrder field_order = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->field_order; + switch(field_order) { + case AV_FIELD_PROGRESSIVE: + info.interlaced_frame = false; + break; + case AV_FIELD_TT: + case AV_FIELD_TB: + info.interlaced_frame = true; + info.top_field_first = true; + break; + case AV_FIELD_BT: + case AV_FIELD_BB: + info.interlaced_frame = true; + info.top_field_first = false; + break; + case AV_FIELD_UNKNOWN: + // Check again later? + check_interlace = false; + break; + } + // check_interlace will prevent these checks being repeated, + // unless it was cleared because we got an AV_FIELD_UNKNOWN response. 
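
Reading the scan type from the stream's field_order, rather than waiting for a decoded AVFrame, makes the information available as soon as the codec parameters are known. A small standalone mapping of AVFieldOrder to the two flags the patch fills in:

extern "C" {
#include <libavcodec/avcodec.h>
}

struct ScanInfo {
    bool known;             // false when the container/codec reports AV_FIELD_UNKNOWN
    bool interlaced;
    bool top_field_first;
};

// Derive the scan type from the stream's codec parameters, as the hunk above does.
static ScanInfo scan_type_from_field_order(enum AVFieldOrder field_order) {
    ScanInfo s = {true, false, false};
    switch (field_order) {
        case AV_FIELD_PROGRESSIVE:
            break;                              // progressive; defaults are fine
        case AV_FIELD_TT:                       // coded and displayed top-field first
        case AV_FIELD_TB:                       // coded top-first, displayed bottom-first
            s.interlaced = true;
            s.top_field_first = true;
            break;
        case AV_FIELD_BT:
        case AV_FIELD_BB:
            s.interlaced = true;
            break;
        default:                                // AV_FIELD_UNKNOWN: try again later
            s.known = false;
            break;
    }
    return s;
}

AV_FIELD_UNKNOWN is why check_interlace is left false in that case: the check can be retried once the stream is actually being decoded.
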
+ } + // Set the video timebase info.video_timebase.num = pStream->time_base.num; info.video_timebase.den = pStream->time_base.den; @@ -410,15 +777,12 @@ info.duration = (info.file_size / info.video_bit_rate); // No duration found in stream of file - if (info.duration <= 0.0f) - { + if (info.duration <= 0.0f) { // No duration is found in the video stream info.duration = -1; info.video_length = -1; is_duration_known = false; - } - else - { + } else { // Yes, a duration was found is_duration_known = true; @@ -443,8 +807,7 @@ } -std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) -{ +std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) { // Check for open reader (or throw exception) if (!is_open) throw ReaderClosed("The FFmpegReader is closed. Call Open() before calling this method.", path); @@ -459,32 +822,27 @@ throw InvalidFile("Could not detect the duration of the video or audio stream.", path); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "requested_frame", requested_frame, "last_frame", last_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "requested_frame", requested_frame, "last_frame", last_frame); // Check the cache for this frame std::shared_ptr frame = final_cache.GetFrame(requested_frame); if (frame) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame", requested_frame); // Return the cached frame return frame; - } - else - { - #pragma omp critical (ReadStream) - { + } else { +#pragma omp critical (ReadStream) + { // Check the cache a 2nd time (due to a potential previous lock) - if (has_missing_frames) - CheckMissingFrame(requested_frame); frame = final_cache.GetFrame(requested_frame); if (frame) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame on 2nd look", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame on 2nd look", requested_frame); // Return the cached frame - } - else { + } else { // Frame is not in cache // Reset seek count seek_count = 0; @@ -496,20 +854,16 @@ // Are we within X frames of the requested frame? 
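
GetFrame() now checks the cache, enters the ReadStream critical section, and checks the cache a second time before deciding to walk or seek, because another thread may have produced the frame while this one waited for the lock. A generic illustration of that double-checked lookup, using std::mutex and std::map in place of the OpenMP critical section and the reader's own cache classes:

#include <map>
#include <memory>
#include <mutex>

struct FrameData { /* decoded image and audio would live here */ };

class TinyFrameCache {
public:
    std::shared_ptr<FrameData> GetOrDecode(int64_t frame_number) {
        if (auto cached = Lookup(frame_number))
            return cached;                       // fast path: no decode lock needed

        std::lock_guard<std::mutex> guard(decode_mutex);
        // Another thread may have decoded this frame while we waited for the lock.
        if (auto cached = Lookup(frame_number))
            return cached;

        auto fresh = std::make_shared<FrameData>();   // stand-in for ReadStream()/Seek()
        {
            std::lock_guard<std::mutex> cache_guard(cache_mutex);
            cache[frame_number] = fresh;
        }
        return fresh;
    }

private:
    std::shared_ptr<FrameData> Lookup(int64_t frame_number) {
        std::lock_guard<std::mutex> cache_guard(cache_mutex);
        auto it = cache.find(frame_number);
        return it == cache.end() ? nullptr : it->second;
    }

    std::mutex decode_mutex;                     // serializes decoding, like 'omp critical (ReadStream)'
    std::mutex cache_mutex;                      // protects the map itself
    std::map<int64_t, std::shared_ptr<FrameData>> cache;
};
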
int64_t diff = requested_frame - last_frame; - if (diff >= 1 && diff <= 20) - { + if (diff >= 1 && diff <= 20) { // Continue walking the stream frame = ReadStream(requested_frame); - } - else - { + } else { // Greater than 30 frames away, or backwards, we need to seek to the nearest key frame if (enable_seek) // Only seek if enabled Seek(requested_frame); - else if (!enable_seek && diff < 0) - { + else if (!enable_seek && diff < 0) { // Start over, since we can't seek, and the requested frame is smaller than our position Close(); Open(); @@ -519,14 +873,13 @@ frame = ReadStream(requested_frame); } } - } //omp critical - return frame; + } //omp critical + return frame; } } // Read the stream until we find the requested Frame -std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) -{ +std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) { // Allocate video frame bool end_of_stream = false; bool check_seek = false; @@ -536,7 +889,7 @@ // Minimum number of packets to process (for performance reasons) int packets_processed = 0; int minimum_packets = OPEN_MP_NUM_PROCESSORS; - int max_packets = 4096; + int max_packets = 4096; // Set the number of threads in OpenMP omp_set_num_threads(OPEN_MP_NUM_PROCESSORS); @@ -544,22 +897,21 @@ omp_set_nested(true); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS); - #pragma omp parallel +#pragma omp parallel { - #pragma omp single +#pragma omp single { // Loop through the stream until the correct frame is found - while (true) - { + while (true) { // Get the next packet into a local variable called packet packet_error = GetNextPacket(); int processing_video_frames_size = 0; int processing_audio_frames_size = 0; { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } @@ -567,14 +919,13 @@ // Wait if too many frames are being processed while (processing_video_frames_size + processing_audio_frames_size >= minimum_packets) { usleep(2500); - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } // Get the next packet (if any) - if (packet_error < 0) - { + if (packet_error < 0) { // Break loop when no more packets found end_of_stream = true; break; @@ -584,60 +935,67 @@ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (GetNextPacket)", "requested_frame", requested_frame, "processing_video_frames_size", processing_video_frames_size, "processing_audio_frames_size", processing_audio_frames_size, "minimum_packets", minimum_packets, "packets_processed", packets_processed, "is_seeking", is_seeking); // Video packet - if (info.has_video && packet->stream_index == videoStream) - { + if (info.has_video && packet->stream_index == videoStream) { // Reset this counter, since we have a video packet num_packets_since_video_frame = 0; // Check the status of a seek (if any) - if (is_seeking) - #pragma omp critical (openshot_seek) - check_seek = 
CheckSeek(true); - else - check_seek = false; - - if (check_seek) { - // Jump to the next iteration of this loop - continue; - } + if (is_seeking) +#pragma omp critical (openshot_seek) + check_seek = CheckSeek(true); + else + check_seek = false; + + if (check_seek) { + // Jump to the next iteration of this loop + continue; + } + + // Packet may become NULL on Close inside Seek if CheckSeek returns false + if (!packet) + // Jump to the next iteration of this loop + continue; // Get the AVFrame from the current packet frame_finished = GetAVFrame(); // Check if the AVFrame is finished and set it - if (frame_finished) - { + if (frame_finished) { // Update PTS / Frame Offset (if any) UpdatePTSOffset(true); // Process Video Packet ProcessVideoPacket(requested_frame); - if (!use_omp_threads) { + if (openshot::Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK) { // Wait on each OMP task to complete before moving on to the next one. This slows // down processing considerably, but might be more stable on some systems. - #pragma omp taskwait +#pragma omp taskwait } } } // Audio packet - else if (info.has_audio && packet->stream_index == audioStream) - { + else if (info.has_audio && packet->stream_index == audioStream) { // Increment this (to track # of packets since the last video packet) num_packets_since_video_frame++; // Check the status of a seek (if any) - if (is_seeking) - #pragma omp critical (openshot_seek) - check_seek = CheckSeek(false); - else - check_seek = false; - - if (check_seek) { - // Jump to the next iteration of this loop - continue; - } + if (is_seeking) +#pragma omp critical (openshot_seek) + check_seek = CheckSeek(false); + else + check_seek = false; + + if (check_seek) { + // Jump to the next iteration of this loop + continue; + } + + // Packet may become NULL on Close inside Seek if CheckSeek returns false + if (!packet) + // Jump to the next iteration of this loop + continue; // Update PTS / Frame Offset (if any) UpdatePTSOffset(false); @@ -651,9 +1009,6 @@ // Check if working frames are 'finished' if (!is_seeking) { - // Check for any missing frames - CheckMissingFrame(requested_frame); - // Check for final frames CheckWorkingFrames(false, requested_frame); } @@ -675,7 +1030,7 @@ } // end omp parallel // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Completed)", "packets_processed", packets_processed, "end_of_stream", end_of_stream, "largest_frame_processed", largest_frame_processed, "Working Cache Count", working_cache.Count(), "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Completed)", "packets_processed", packets_processed, "end_of_stream", end_of_stream, "largest_frame_processed", largest_frame_processed, "Working Cache Count", working_cache.Count()); // End of stream? 
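
Video packets are still handed off as OpenMP tasks; what changes is that the per-packet taskwait is now gated behind the WAIT_FOR_VIDEO_PROCESSING_TASK setting instead of the removed use_omp_threads flag. A self-contained sketch of that producer/task pattern, with printf standing in for ProcessVideoPacket():

#include <cstdio>
#include <omp.h>

// Hand each "packet" to an OpenMP task; optionally wait for every task before
// reading the next packet, mirroring the setting-gated '#pragma omp taskwait' above.
static void process_packets(int packet_count, bool wait_for_each_task) {
    #pragma omp parallel
    #pragma omp single
    {
        for (int i = 0; i < packet_count; ++i) {
            #pragma omp task firstprivate(i)
            {
                // Stand-in for ProcessVideoPacket(): decode, convert and cache one frame.
                printf("processed packet %d on thread %d\n", i, omp_get_thread_num());
            }

            if (wait_for_each_task) {
                // Slower, near-serial behaviour, but more predictable on some systems.
                #pragma omp taskwait
            }
        }
    }   // implicit barrier: all outstanding tasks finish here
}
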
if (end_of_stream) @@ -694,8 +1049,7 @@ if (frame) { // return the largest processed frame (assuming it was the last in the video file) return frame; - } - else { + } else { // The largest processed frame is no longer in cache, return a blank frame std::shared_ptr f = CreateFrame(largest_frame_processed); f->AddColor(info.width, info.height, "#000"); @@ -706,51 +1060,93 @@ } // Get the next packet (if any) -int FFmpegReader::GetNextPacket() -{ +int FFmpegReader::GetNextPacket() { int found_packet = 0; - AVPacket *next_packet = new AVPacket(); - found_packet = av_read_frame(pFormatCtx, next_packet); + AVPacket *next_packet; +#pragma omp critical(getnextpacket) + { + next_packet = new AVPacket(); + found_packet = av_read_frame(pFormatCtx, next_packet); - if (packet) { - // Remove previous packet before getting next one - RemoveAVPacket(packet); - packet = NULL; - } - if (found_packet >= 0) - { - // Update current packet pointer - packet = next_packet; - } + if (packet) { + // Remove previous packet before getting next one + RemoveAVPacket(packet); + packet = NULL; + } + if (found_packet >= 0) { + // Update current packet pointer + packet = next_packet; + } + else + delete next_packet; + } // Return if packet was found (or error number) return found_packet; } // Get an AVFrame (if any) -bool FFmpegReader::GetAVFrame() -{ +bool FFmpegReader::GetAVFrame() { int frameFinished = -1; int ret = 0; // Decode video frame AVFrame *next_frame = AV_ALLOCATE_FRAME(); - #pragma omp critical (packet_cache) +#pragma omp critical (packet_cache) { - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 frameFinished = 0; + ret = avcodec_send_packet(pCodecCtx, packet); + + #if HAVE_HW_ACCEL + // Get the format from the variables set in get_hw_dec_format + hw_de_av_pix_fmt = hw_de_av_pix_fmt_global; + hw_de_av_device_type = hw_de_av_device_type_global; + #endif // HAVE_HW_ACCEL if (ret < 0 || ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)"); } else { - pFrame = new AVFrame(); + AVFrame *next_frame2; + #if HAVE_HW_ACCEL + if (hw_de_on && hw_de_supported) { + next_frame2 = AV_ALLOCATE_FRAME(); + } + else + #endif // HAVE_HW_ACCEL + { + next_frame2 = next_frame; + } + pFrame = AV_ALLOCATE_FRAME(); while (ret >= 0) { - ret = avcodec_receive_frame(pCodecCtx, next_frame); - if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { - break; + ret = avcodec_receive_frame(pCodecCtx, next_frame2); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; + } + if (ret != 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid return frame received)"); } + #if HAVE_HW_ACCEL + if (hw_de_on && hw_de_supported) { + int err; + if (next_frame2->format == hw_de_av_pix_fmt) { + next_frame->format = AV_PIX_FMT_YUV420P; + if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)"); + } + if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)"); + } + } + } + else + #endif // HAVE_HW_ACCEL + { // No hardware acceleration used -> no copy from GPU memory needed + next_frame = next_frame2; + } + // TODO also handle possible further frames // Use only the first frame 
like avcodec_decode_video2 if (frameFinished == 0 ) { @@ -758,34 +1154,29 @@ av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1); av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize, (AVPixelFormat)(pStream->codecpar->format), info.width, info.height); - if (!check_interlace) { - check_interlace = true; - info.interlaced_frame = next_frame->interlaced_frame; - info.top_field_first = next_frame->top_field_first; - } } } + #if HAVE_HW_ACCEL + if (hw_de_on && hw_de_supported) { + AV_FREE_FRAME(&next_frame2); + } + #endif // HAVE_HW_ACCEL } - #else +#else avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet); + // always allocate pFrame (because we do that in the ffmpeg >= 3.2 as well); it will always be freed later + pFrame = AV_ALLOCATE_FRAME(); + // is frame finished if (frameFinished) { // AVFrames are clobbered on the each call to avcodec_decode_video, so we // must make a copy of the image data before this method is called again. - pFrame = AV_ALLOCATE_FRAME(); avpicture_alloc((AVPicture *) pFrame, pCodecCtx->pix_fmt, info.width, info.height); av_picture_copy((AVPicture *) pFrame, (AVPicture *) next_frame, pCodecCtx->pix_fmt, info.width, info.height); - - // Detect interlaced frame (only once) - if (!check_interlace) { - check_interlace = true; - info.interlaced_frame = next_frame->interlaced_frame; - info.top_field_first = next_frame->top_field_first; - } } - #endif +#endif // IS_FFMPEG_3_2 } // deallocate the frame @@ -796,11 +1187,9 @@ } // Check the current seek position and determine if we need to seek again -bool FFmpegReader::CheckSeek(bool is_video) -{ +bool FFmpegReader::CheckSeek(bool is_video) { // Are we seeking for a specific frame? - if (is_seeking) - { + if (is_seeking) { // Determine if both an audio and video packet have been decoded since the seek happened. // If not, allow the ReadStream method to keep looping if ((is_video_seek && !seek_video_frame_found) || (!is_video_seek && !seek_audio_frame_found)) @@ -816,16 +1205,13 @@ max_seeked_frame = seek_video_frame_found; // determine if we are "before" the requested frame - if (max_seeked_frame >= seeking_frame) - { + if (max_seeked_frame >= seeking_frame) { // SEEKED TOO FAR ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckSeek (Too far, seek again)", "is_video_seek", is_video_seek, "max_seeked_frame", max_seeked_frame, "seeking_frame", seeking_frame, "seeking_pts", seeking_pts, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); // Seek again... to the nearest Keyframe Seek(seeking_frame - (10 * seek_count * seek_count)); - } - else - { + } else { // SEEK WORKED ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckSeek (Successful)", "is_video_seek", is_video_seek, "current_pts", packet->pts, "seeking_pts", seeking_pts, "seeking_frame", seeking_frame, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); @@ -841,8 +1227,7 @@ } // Process a video packet -void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) -{ +void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) { // Calculate current frame # int64_t current_frame = ConvertVideoPTStoFrame(GetVideoPTS()); @@ -851,20 +1236,19 @@ seek_video_frame_found = current_frame; // Are we close enough to decode the frame? and is this frame # valid? 
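
For FFmpeg 3.2 and newer, the decode path above is the send/receive API plus an optional av_hwframe_transfer_data() download when the frame arrives as a hardware surface. The sketch below performs the same steps for a single packet, taking only the first decoded frame just as the code above does; unlike the patch it lets FFmpeg pick the software pixel format rather than forcing AV_PIX_FMT_YUV420P.

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
}

// Decode one frame from pkt. Returns a frame the caller must free with
// av_frame_free(), or NULL when no frame is available (EAGAIN/EOF) or on error.
static AVFrame *decode_one_frame(AVCodecContext *ctx, AVPacket *pkt,
                                 enum AVPixelFormat hw_pix_fmt) {
    if (avcodec_send_packet(ctx, pkt) < 0)
        return NULL;

    AVFrame *decoded = av_frame_alloc();
    if (avcodec_receive_frame(ctx, decoded) < 0) {
        av_frame_free(&decoded);
        return NULL;
    }

    if (decoded->format != hw_pix_fmt)
        return decoded;                          // already in system memory

    // Hardware surface: copy the pixel data (and metadata) into a software frame.
    AVFrame *sw = av_frame_alloc();
    if (av_hwframe_transfer_data(sw, decoded, 0) < 0 ||
        av_frame_copy_props(sw, decoded) < 0) {
        av_frame_free(&sw);
        av_frame_free(&decoded);
        return NULL;
    }
    av_frame_free(&decoded);
    return sw;
}
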
- if ((current_frame < (requested_frame - 20)) or (current_frame == -1)) - { + if ((current_frame < (requested_frame - 20)) or (current_frame == -1)) { // Remove frame and packet RemoveAVFrame(pFrame); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Skipped)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Skipped)", "requested_frame", requested_frame, "current_frame", current_frame); // Skip to next frame without decoding or caching return; } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Before)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Before)", "requested_frame", requested_frame, "current_frame", current_frame); // Init some things local (for OpenMP) PixelFormat pix_fmt = AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx); @@ -872,12 +1256,13 @@ int width = info.width; int64_t video_length = info.video_length; AVFrame *my_frame = pFrame; + pFrame = NULL; // Add video frame to list of processing video frames - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames[current_frame] = current_frame; - #pragma omp task firstprivate(current_frame, my_frame, height, width, video_length, pix_fmt) +#pragma omp task firstprivate(current_frame, my_frame, height, width, video_length, pix_fmt) { // Create variables for a RGB Frame (since most videos are not in RGB, we must convert it) AVFrame *pFrameRGB = NULL; @@ -889,9 +1274,52 @@ if (pFrameRGB == NULL) throw OutOfBoundsFrame("Convert Image Broke!", current_frame, video_length); - // Determine if video needs to be scaled down (for performance reasons) - // Timelines pass their size to the clips, which pass their size to the readers (as max size) - // If a clip is being scaled larger, it will set max_width and max_height = 0 (which means don't down scale) + // Determine the max size of this source image (based on the timeline's size, the scaling mode, + // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, + // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline + // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in + // the future. 
+ int max_width = openshot::Settings::Instance()->MAX_WIDTH; + if (max_width <= 0) + max_width = info.width; + int max_height = openshot::Settings::Instance()->MAX_HEIGHT; + if (max_height <= 0) + max_height = info.height; + + Clip *parent = (Clip *) GetClip(); + if (parent) { + if (parent->scale == SCALE_FIT || parent->scale == SCALE_STRETCH) { + // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + max_width = std::max(float(max_width), max_width * max_scale_x); + max_height = std::max(float(max_height), max_height * max_scale_y); + + } else if (parent->scale == SCALE_CROP) { + // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + QSize width_size(max_width * max_scale_x, + round(max_width / (float(info.width) / float(info.height)))); + QSize height_size(round(max_height / (float(info.height) / float(info.width))), + max_height * max_scale_y); + // respect aspect ratio + if (width_size.width() >= max_width && width_size.height() >= max_height) { + max_width = std::max(max_width, width_size.width()); + max_height = std::max(max_height, width_size.height()); + } else { + max_width = std::max(max_width, height_size.width()); + max_height = std::max(max_height, height_size.height()); + } + + } else { + // No scaling, use original image size (slower) + max_width = info.width; + max_height = info.height; + } + } + + // Determine if image needs to be scaled (for performance reasons) int original_height = height; if (max_width != 0 && max_height != 0 && max_width < width && max_height < height) { // Override width and height (but maintain aspect ratio) @@ -913,14 +1341,18 @@ // Determine required buffer size and allocate buffer numBytes = AV_GET_IMAGE_SIZE(PIX_FMT_RGBA, width, height); - #pragma omp critical (video_buffer) +#pragma omp critical (video_buffer) buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t)); // Copy picture data from one AVFrame (or AVPicture) to another one. 
AV_COPY_PICTURE_DATA(pFrameRGB, buffer, PIX_FMT_RGBA, width, height); + int scale_mode = SWS_FAST_BILINEAR; + if (openshot::Settings::Instance()->HIGH_QUALITY_SCALING) { + scale_mode = SWS_BICUBIC; + } SwsContext *img_convert_ctx = sws_getContext(info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx), width, - height, PIX_FMT_RGBA, SWS_LANCZOS, NULL, NULL, NULL); + height, PIX_FMT_RGBA, scale_mode, NULL, NULL, NULL); // Resize / Convert to RGB sws_scale(img_convert_ctx, my_frame->data, my_frame->linesize, 0, @@ -936,7 +1368,7 @@ working_cache.Add(f); // Keep track of last last_video_frame - #pragma omp critical (video_buffer) +#pragma omp critical (video_buffer) last_video_frame = f; // Free the RGB image @@ -949,37 +1381,35 @@ // Remove video frame from list of processing video frames { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames.erase(current_frame); processed_video_frames[current_frame] = current_frame; } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (After)", "requested_frame", requested_frame, "current_frame", current_frame, "f->number", f->number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (After)", "requested_frame", requested_frame, "current_frame", current_frame, "f->number", f->number); } // end omp task } // Process an audio packet -void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample) -{ +void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample) { // Track 1st audio packet after a successful seek if (!seek_audio_frame_found && is_seeking) seek_audio_frame_found = target_frame; // Are we close enough to decode the frame's audio? 
- if (target_frame < (requested_frame - 20)) - { + if (target_frame < (requested_frame - 20)) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Skipped)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Skipped)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample); // Skip to next frame without decoding or caching return; } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Before)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Before)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample); // Init an AVFrame to hold the decoded audio samples int frame_finished = 0; @@ -991,9 +1421,9 @@ // re-initialize buffer size (it gets changed in the avcodec_decode_audio2 method call) int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE; - #pragma omp critical (ProcessAudioPacket) +#pragma omp critical (ProcessAudioPacket) { - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 int ret = 0; frame_finished = 1; while((packet->size > 0 || (!packet->data && frame_finished)) && ret >= 0) { @@ -1020,7 +1450,7 @@ { ret = -1; } - #else +#else int used = avcodec_decode_audio4(aCodecCtx, audio_frame, &frame_finished, packet); #endif } @@ -1028,12 +1458,12 @@ if (frame_finished) { // determine how many samples were decoded - int planar = av_sample_fmt_is_planar((AVSampleFormat)AV_GET_CODEC_PIXEL_FORMAT(aStream, aCodecCtx)); + int planar = av_sample_fmt_is_planar((AVSampleFormat) AV_GET_CODEC_PIXEL_FORMAT(aStream, aCodecCtx)); int plane_size = -1; data_size = av_samples_get_buffer_size(&plane_size, - AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels, - audio_frame->nb_samples, - (AVSampleFormat)(AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx)), 1); + AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels, + audio_frame->nb_samples, + (AVSampleFormat) (AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx)), 1); // Calculate total number of samples packet_samples = audio_frame->nb_samples * AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels; @@ -1059,12 +1489,11 @@ // Add audio frame to list of processing audio frames { - const GenericScopedLock lock(processingCriticalSection); - processing_audio_frames.insert(pair(previous_packet_location.frame, previous_packet_location.frame)); + const GenericScopedLock lock(processingCriticalSection); + processing_audio_frames.insert(std::pair(previous_packet_location.frame, previous_packet_location.frame)); } - while (pts_remaining_samples) - { + while (pts_remaining_samples) { // Get Samples per frame (for this frame number) int samples_per_frame = Frame::GetSamplesPerFrame(previous_packet_location.frame, info.fps, info.sample_rate, info.channels); @@ -1083,8 +1512,8 @@ // Add audio frame to list of processing audio frames { - const GenericScopedLock lock(processingCriticalSection); - processing_audio_frames.insert(pair(previous_packet_location.frame, previous_packet_location.frame)); + const GenericScopedLock lock(processingCriticalSection); + processing_audio_frames.insert(std::pair(previous_packet_location.frame, previous_packet_location.frame)); } } else { @@ -1097,7 +1526,7 @@ // 
Allocate audio buffer int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE]; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16); // Create output frame AVFrame *audio_converted = AV_ALLOCATE_FRAME(); @@ -1110,24 +1539,24 @@ // setup resample context avr = SWR_ALLOC(); - av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); + av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); av_opt_set_int(avr, "out_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); - av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0); - av_opt_set_int(avr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); - av_opt_set_int(avr, "in_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr, "in_channels", info.channels, 0); - av_opt_set_int(avr, "out_channels", info.channels, 0); + av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0); + av_opt_set_int(avr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); + av_opt_set_int(avr, "in_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr, "in_channels", info.channels, 0); + av_opt_set_int(avr, "out_channels", info.channels, 0); int r = SWR_INIT(avr); // Convert audio samples - nb_samples = SWR_CONVERT(avr, // audio resample context - audio_converted->data, // output data pointers - audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) - audio_converted->nb_samples, // maximum number of samples that the output buffer can hold - audio_frame->data, // input data pointers - audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) - audio_frame->nb_samples); // number of input samples to convert + nb_samples = SWR_CONVERT(avr, // audio resample context + audio_converted->data, // output data pointers + audio_converted->linesize[0], // output plane size, in bytes. 
(0 if unknown) + audio_converted->nb_samples, // maximum number of samples that the output buffer can hold + audio_frame->data, // input data pointers + audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) + audio_frame->nb_samples); // number of input samples to convert // Copy audio samples over original samples memcpy(audio_buf, audio_converted->data[0], audio_converted->nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16) * info.channels); @@ -1143,8 +1572,7 @@ int64_t starting_frame_number = -1; bool partial_frame = true; - for (int channel_filter = 0; channel_filter < info.channels; channel_filter++) - { + for (int channel_filter = 0; channel_filter < info.channels; channel_filter++) { // Array of floats (to hold samples for each channel) starting_frame_number = target_frame; int channel_buffer_size = packet_samples / info.channels; @@ -1158,11 +1586,9 @@ // Toggle through each channel number, since channel data is stored like (left right left right) int channel = 0; int position = 0; - for (int sample = 0; sample < packet_samples; sample++) - { + for (int sample = 0; sample < packet_samples; sample++) { // Only add samples for current channel - if (channel_filter == channel) - { + if (channel_filter == channel) { // Add sample (convert from (-32768 to 32768) to (-1.0 to 1.0)) channel_buffer[position] = audio_buf[sample] * (1.0f / (1 << 15)); @@ -1173,7 +1599,7 @@ // increment channel (if needed) if ((channel + 1) < info.channels) // move to next channel - channel ++; + channel++; else // reset channel channel = 0; @@ -1182,9 +1608,8 @@ // Loop through samples, and add them to the correct frames int start = starting_sample; int remaining_samples = channel_buffer_size; - float *iterate_channel_buffer = channel_buffer; // pointer to channel buffer - while (remaining_samples > 0) - { + float *iterate_channel_buffer = channel_buffer; // pointer to channel buffer + while (remaining_samples > 0) { // Get Samples per frame (for this frame number) int samples_per_frame = Frame::GetSamplesPerFrame(starting_frame_number, info.fps, info.sample_rate, info.channels); @@ -1238,7 +1663,7 @@ // Remove audio frame from list of processing audio frames { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); // Update all frames as completed for (int64_t f = target_frame; f < starting_frame_number; f++) { // Remove the frame # from the processing list. NOTE: If more than one thread is @@ -1262,15 +1687,13 @@ AV_FREE_FRAME(&audio_frame); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (After)", "requested_frame", requested_frame, "starting_frame", target_frame, "end_frame", starting_frame_number - 1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (After)", "requested_frame", requested_frame, "starting_frame", target_frame, "end_frame", starting_frame_number - 1); } - // Seek to a specific frame. This is not always frame accurate, it's more of an estimation on many codecs. 
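// --- Illustrative aside, not part of the patch: the keyframe-seek pattern that
// Seek() below is built around, reduced to its core. seek_to_keyframe() is a
// hypothetical helper; the real method additionally clears its caches, resets
// the PTS bookkeeping, and falls back to re-opening the file when seeking near
// frame 1.
static bool seek_to_keyframe(AVFormatContext *fmt_ctx, int stream_index,
                             int64_t target_pts, AVCodecContext *dec_ctx) {
    // Jump to the nearest keyframe at or before target_pts
    if (av_seek_frame(fmt_ctx, stream_index, target_pts, AVSEEK_FLAG_BACKWARD) < 0)
        return false;

    // Flush frames still buffered in the decoder, otherwise the first frames
    // returned after the seek still belong to the old position
    if (dec_ctx)
        avcodec_flush_buffers(dec_ctx);
    return true;
}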
-void FFmpegReader::Seek(int64_t requested_frame) -{ +void FFmpegReader::Seek(int64_t requested_frame) { // Adjust for a requested frame that is too small or too large if (requested_frame < 1) requested_frame = 1; @@ -1280,7 +1703,7 @@ int processing_video_frames_size = 0; int processing_audio_frames_size = 0; { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } @@ -1291,7 +1714,7 @@ // Wait for any processing frames to complete while (processing_video_frames_size + processing_audio_frames_size > 0) { usleep(2500); - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } @@ -1302,7 +1725,7 @@ // Clear processed lists { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_audio_frames.clear(); processing_video_frames.clear(); processed_video_frames.clear(); @@ -1328,9 +1751,8 @@ seek_count++; // If seeking near frame 1, we need to close and re-open the file (this is more reliable than seeking) - int buffer_amount = max(OPEN_MP_NUM_PROCESSORS, 8); - if (requested_frame - buffer_amount < 20) - { + int buffer_amount = std::max(OPEN_MP_NUM_PROCESSORS, 8); + if (requested_frame - buffer_amount < 20) { // Close and re-open file (basically seeking to frame 1) Close(); Open(); @@ -1348,21 +1770,18 @@ } seek_audio_frame_found = 0; // used to detect which frames to throw away after a seek seek_video_frame_found = 0; // used to detect which frames to throw away after a seek - } - else - { + + } else { // Seek to nearest key-frame (aka, i-frame) bool seek_worked = false; int64_t seek_target = 0; // Seek video stream (if any) - if (!seek_worked && info.has_video) - { + if (!seek_worked && info.has_video) { seek_target = ConvertFrameToVideoPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.video_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->AV_FILENAME); - } else - { + } else { // VIDEO SEEK is_video_seek = true; seek_worked = true; @@ -1370,13 +1789,11 @@ } // Seek audio stream (if not already seeked... and if an audio stream is found) - if (!seek_worked && info.has_audio) - { + if (!seek_worked && info.has_audio) { seek_target = ConvertFrameToAudioPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.audio_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->AV_FILENAME); - } else - { + } else { // AUDIO SEEK is_video_seek = false; seek_worked = true; @@ -1384,8 +1801,7 @@ } // Was the seek successful? 
- if (seek_worked) - { + if (seek_worked) { // Flush audio buffer if (info.has_audio) avcodec_flush_buffers(aCodecCtx); @@ -1408,9 +1824,7 @@ seek_audio_frame_found = 0; // used to detect which frames to throw away after a seek seek_video_frame_found = 0; // used to detect which frames to throw away after a seek - } - else - { + } else { // seek failed is_seeking = false; seeking_pts = 0; @@ -1432,10 +1846,9 @@ } // Get the PTS for the current video packet -int64_t FFmpegReader::GetVideoPTS() -{ +int64_t FFmpegReader::GetVideoPTS() { int64_t current_pts = 0; - if(packet->dts != AV_NOPTS_VALUE) + if (packet->dts != AV_NOPTS_VALUE) current_pts = packet->dts; // Return adjusted PTS @@ -1443,38 +1856,33 @@ } // Update PTS Offset (if any) -void FFmpegReader::UpdatePTSOffset(bool is_video) -{ +void FFmpegReader::UpdatePTSOffset(bool is_video) { // Determine the offset between the PTS and Frame number (only for 1st frame) - if (is_video) - { + if (is_video) { // VIDEO PACKET if (video_pts_offset == 99999) // Has the offset been set yet? { // Find the difference between PTS and frame number (no more than 10 timebase units allowed) - video_pts_offset = 0 - max(GetVideoPTS(), (int64_t) info.video_timebase.ToInt() * 10); + video_pts_offset = 0 - std::max(GetVideoPTS(), (int64_t) info.video_timebase.ToInt() * 10); // debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::UpdatePTSOffset (Video)", "video_pts_offset", video_pts_offset, "is_video", is_video, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::UpdatePTSOffset (Video)", "video_pts_offset", video_pts_offset, "is_video", is_video); } - } - else - { + } else { // AUDIO PACKET if (audio_pts_offset == 99999) // Has the offset been set yet? { // Find the difference between PTS and frame number (no more than 10 timebase units allowed) - audio_pts_offset = 0 - max(packet->pts, (int64_t) info.audio_timebase.ToInt() * 10); + audio_pts_offset = 0 - std::max(packet->pts, (int64_t) info.audio_timebase.ToInt() * 10); // debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::UpdatePTSOffset (Audio)", "audio_pts_offset", audio_pts_offset, "is_video", is_video, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::UpdatePTSOffset (Audio)", "audio_pts_offset", audio_pts_offset, "is_video", is_video); } } } // Convert PTS into Frame Number -int64_t FFmpegReader::ConvertVideoPTStoFrame(int64_t pts) -{ +int64_t FFmpegReader::ConvertVideoPTStoFrame(int64_t pts) { // Apply PTS offset pts = pts + video_pts_offset; int64_t previous_video_frame = current_video_frame; @@ -1494,23 +1902,23 @@ if (frame == previous_video_frame) { // return -1 frame number frame = -1; - } - else + } else { // Increment expected frame current_video_frame++; + } if (current_video_frame < frame) // has missing frames - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ConvertVideoPTStoFrame (detected missing frame)", "calculated frame", frame, "previous_video_frame", previous_video_frame, "current_video_frame", current_video_frame, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ConvertVideoPTStoFrame (detected missing frame)", "calculated frame", frame, "previous_video_frame", previous_video_frame, "current_video_frame", current_video_frame); // Sometimes frames are missing due to varying timestamps, or they were dropped. Determine // if we are missing a video frame. 
- const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); while (current_video_frame < frame) { if (!missing_video_frames.count(current_video_frame)) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ConvertVideoPTStoFrame (tracking missing frame)", "current_video_frame", current_video_frame, "previous_video_frame", previous_video_frame, "", -1, "", -1, "", -1, "", -1); - missing_video_frames.insert(pair(current_video_frame, previous_video_frame)); - missing_video_frames_source.insert(pair(previous_video_frame, current_video_frame)); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ConvertVideoPTStoFrame (tracking missing frame)", "current_video_frame", current_video_frame, "previous_video_frame", previous_video_frame); + missing_video_frames.insert(std::pair(current_video_frame, previous_video_frame)); + missing_video_frames_source.insert(std::pair(previous_video_frame, current_video_frame)); } // Mark this reader as containing missing frames @@ -1526,8 +1934,7 @@ } // Convert Frame Number into Video PTS -int64_t FFmpegReader::ConvertFrameToVideoPTS(int64_t frame_number) -{ +int64_t FFmpegReader::ConvertFrameToVideoPTS(int64_t frame_number) { // Get timestamp of this frame (in seconds) double seconds = double(frame_number) / info.fps.ToDouble(); @@ -1539,8 +1946,7 @@ } // Convert Frame Number into Video PTS -int64_t FFmpegReader::ConvertFrameToAudioPTS(int64_t frame_number) -{ +int64_t FFmpegReader::ConvertFrameToAudioPTS(int64_t frame_number) { // Get timestamp of this frame (in seconds) double seconds = double(frame_number) / info.fps.ToDouble(); @@ -1552,8 +1958,7 @@ } // Calculate Starting video frame and sample # for an audio PTS -AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) -{ +AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) { // Apply PTS offset pts = pts + audio_pts_offset; @@ -1586,8 +1991,7 @@ // Compare to previous audio packet (and fix small gaps due to varying PTS timestamps) if (previous_packet_location.frame != -1) { - if (location.is_near(previous_packet_location, samples_per_frame, samples_per_frame)) - { + if (location.is_near(previous_packet_location, samples_per_frame, samples_per_frame)) { int64_t orig_frame = location.frame; int orig_start = location.sample_start; @@ -1596,17 +2000,17 @@ location.frame = previous_packet_location.frame; // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Detected)", "Source Frame", orig_frame, "Source Audio Sample", orig_start, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Detected)", "Source Frame", orig_frame, "Source Audio Sample", orig_start, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts); } else { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Ignored - too big)", "Previous location frame", previous_packet_location.frame, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Ignored - too big)", "Previous location frame", previous_packet_location.frame, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts); - const GenericScopedLock 
lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); for (int64_t audio_frame = previous_packet_location.frame; audio_frame < location.frame; audio_frame++) { if (!missing_audio_frames.count(audio_frame)) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (tracking missing frame)", "missing_audio_frame", audio_frame, "previous_audio_frame", previous_packet_location.frame, "new location frame", location.frame, "", -1, "", -1, "", -1); - missing_audio_frames.insert(pair(previous_packet_location.frame - 1, audio_frame)); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (tracking missing frame)", "missing_audio_frame", audio_frame, "previous_audio_frame", previous_packet_location.frame, "new location frame", location.frame); + missing_audio_frames.insert(std::pair(audio_frame, previous_packet_location.frame - 1)); } } } @@ -1620,12 +2024,18 @@ } // Create a new Frame (or return an existing one) and add it to the working queue. -std::shared_ptr FFmpegReader::CreateFrame(int64_t requested_frame) -{ +std::shared_ptr FFmpegReader::CreateFrame(int64_t requested_frame) { // Check working cache std::shared_ptr output = working_cache.GetFrame(requested_frame); - if (!output) - { + + if (!output) { + // Lock + const GenericScopedLock lock(processingCriticalSection); + + // (re-)Check working cache + output = working_cache.GetFrame(requested_frame); + if(output) return output; + // Create a new frame on the working cache output = std::make_shared(requested_frame, info.width, info.height, "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels); output->SetPixelRatio(info.pixel_ratio.num, info.pixel_ratio.den); // update pixel ratio @@ -1638,8 +2048,7 @@ if (requested_frame > largest_frame_processed) largest_frame_processed = requested_frame; } - - // Return new frame + // Return frame return output; } @@ -1649,51 +2058,54 @@ // Sometimes a seek gets partial frames, and we need to remove them bool seek_trash = false; int64_t max_seeked_frame = seek_audio_frame_found; // determine max seeked frame - if (seek_video_frame_found > max_seeked_frame) + if (seek_video_frame_found > max_seeked_frame) { max_seeked_frame = seek_video_frame_found; + } if ((info.has_audio && seek_audio_frame_found && max_seeked_frame >= requested_frame) || - (info.has_video && seek_video_frame_found && max_seeked_frame >= requested_frame)) - seek_trash = true; + (info.has_video && seek_video_frame_found && max_seeked_frame >= requested_frame)) { + seek_trash = true; + } return seek_trash; } -// Check if a frame is missing and attempt to replace it's frame image (and -bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) -{ +// Check if a frame is missing and attempt to replace its frame image (and +bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) { // Lock - const GenericScopedLock lock(processingCriticalSection); - - // Init # of times this frame has been checked so far - int checked_count = 0; + const GenericScopedLock lock(processingCriticalSection); // Increment check count for this frame (or init to 1) - if (checked_frames.count(requested_frame) == 0) - checked_frames[requested_frame] = 1; - else - checked_frames[requested_frame]++; - checked_count = checked_frames[requested_frame]; + ++checked_frames[requested_frame]; // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame", "requested_frame", requested_frame, 
"has_missing_frames", has_missing_frames, "missing_video_frames.size()", missing_video_frames.size(), "checked_count", checked_count, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame", "requested_frame", requested_frame, "has_missing_frames", has_missing_frames, "missing_video_frames.size()", missing_video_frames.size(), "checked_count", checked_frames[requested_frame]); // Missing frames (sometimes frame #'s are skipped due to invalid or missing timestamps) - map::iterator itr; + std::map::iterator itr; bool found_missing_frame = false; - // Check if requested frame is a missing frame - if (missing_video_frames.count(requested_frame) || missing_audio_frames.count(requested_frame)) { - int64_t missing_source_frame = -1; - if (missing_video_frames.count(requested_frame)) - missing_source_frame = missing_video_frames.find(requested_frame)->second; - else if (missing_audio_frames.count(requested_frame)) - missing_source_frame = missing_audio_frames.find(requested_frame)->second; + // Special MP3 Handling (ignore more than 1 video frame) + if (info.has_audio and info.has_video) { + AVCodecID aCodecId = AV_FIND_DECODER_CODEC_ID(aStream); + AVCodecID vCodecId = AV_FIND_DECODER_CODEC_ID(pStream); + // If MP3 with single video frame, handle this special case by copying the previously + // decoded image to the new frame. Otherwise, it will spend a huge amount of + // CPU time looking for missing images for all the audio-only frames. + if (checked_frames[requested_frame] > 8 && !missing_video_frames.count(requested_frame) && + !processing_audio_frames.count(requested_frame) && processed_audio_frames.count(requested_frame) && + last_frame && last_video_frame && last_video_frame->has_image_data && aCodecId == AV_CODEC_ID_MP3 && (vCodecId == AV_CODEC_ID_MJPEGB || vCodecId == AV_CODEC_ID_MJPEG)) { + missing_video_frames.insert(std::pair(requested_frame, last_video_frame->number)); + missing_video_frames_source.insert(std::pair(last_video_frame->number, requested_frame)); + missing_frames.Add(last_video_frame); + } + } + + // Check if requested video frame is a missing + if (missing_video_frames.count(requested_frame)) { + int64_t missing_source_frame = missing_video_frames.find(requested_frame)->second; // Increment missing source frame check count (or init to 1) - if (checked_frames.count(missing_source_frame) == 0) - checked_frames[missing_source_frame] = 1; - else - checked_frames[missing_source_frame]++; + ++checked_frames[missing_source_frame]; // Get the previous frame of this missing frame (if it's available in missing cache) std::shared_ptr parent_frame = missing_frames.GetFrame(missing_source_frame); @@ -1709,46 +2121,52 @@ std::shared_ptr missing_frame = CreateFrame(requested_frame); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame (Is Previous Video Frame Final)", "requested_frame", requested_frame, "missing_frame->number", missing_frame->number, "missing_source_frame", missing_source_frame, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame (Is Previous Video Frame Final)", "requested_frame", requested_frame, "missing_frame->number", missing_frame->number, "missing_source_frame", missing_source_frame); // If previous frame found, copy image from previous to missing frame (else we'll just wait a bit and try again later) if (parent_frame != NULL) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame (AddImage from 
Previous Video Frame)", "requested_frame", requested_frame, "missing_frame->number", missing_frame->number, "missing_source_frame", missing_source_frame, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame (AddImage from Previous Video Frame)", "requested_frame", requested_frame, "missing_frame->number", missing_frame->number, "missing_source_frame", missing_source_frame); // Add this frame to the processed map (since it's already done) std::shared_ptr parent_image = parent_frame->GetImage(); if (parent_image) { missing_frame->AddImage(std::shared_ptr(new QImage(*parent_image))); - processed_video_frames[missing_frame->number] = missing_frame->number; - processed_audio_frames[missing_frame->number] = missing_frame->number; + } + } + } - // Move frame to final cache - final_cache.Add(missing_frame); + // Check if requested audio frame is a missing + if (missing_audio_frames.count(requested_frame)) { - // Remove frame from working cache - working_cache.Remove(missing_frame->number); + // Create blank missing frame + std::shared_ptr missing_frame = CreateFrame(requested_frame); - // Update last_frame processed - last_frame = missing_frame->number; - } - } + // Get Samples per frame (for this frame number) + int samples_per_frame = Frame::GetSamplesPerFrame(missing_frame->number, info.fps, info.sample_rate, info.channels); + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame (Add Silence for Missing Audio Frame)", "requested_frame", requested_frame, "missing_frame->number", missing_frame->number, "samples_per_frame", samples_per_frame); + + // Add this frame to the processed map (since it's already done) + missing_frame->AddAudioSilence(samples_per_frame); + processed_audio_frames[missing_frame->number] = missing_frame->number; } return found_missing_frame; } // Check the working queue, and move finished frames to the finished queue -void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_frame) -{ +void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_frame) { // Loop through all working queue frames - bool checked_count_tripped = false; - int max_checked_count = 80; + bool checked_count_tripped = false; + int max_checked_count = 80; - while (true) - { + // Check if requested frame is 'missing' + CheckMissingFrame(requested_frame); + + while (true) { // Get the front frame of working cache std::shared_ptr f(working_cache.GetSmallestFrame()); @@ -1772,17 +2190,17 @@ bool is_video_ready = false; bool is_audio_ready = false; { // limit scope of next few lines - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); is_video_ready = processed_video_frames.count(f->number); is_audio_ready = processed_audio_frames.count(f->number); // Get check count for this frame checked_frames_size = checked_frames.size(); - if (!checked_count_tripped || f->number >= requested_frame) - checked_count = checked_frames[f->number]; - else - // Force checked count over the limit - checked_count = max_checked_count; + if (!checked_count_tripped || f->number >= requested_frame) + checked_count = checked_frames[f->number]; + else + // Force checked count over the limit + checked_count = max_checked_count; } if (previous_packet_location.frame == f->number && !end_of_stream) @@ -1798,8 +2216,8 @@ // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (exceeded checked_count)", "requested_frame", 
requested_frame, "frame_number", f->number, "is_video_ready", is_video_ready, "is_audio_ready", is_audio_ready, "checked_count", checked_count, "checked_frames_size", checked_frames_size); - // Trigger checked count tripped mode (clear out all frames before requested frame) - checked_count_tripped = true; + // Trigger checked count tripped mode (clear out all frames before requested frame) + checked_count_tripped = true; if (info.has_video && !is_video_ready && last_video_frame) { // Copy image from last frame @@ -1817,13 +2235,11 @@ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames", "requested_frame", requested_frame, "frame_number", f->number, "is_video_ready", is_video_ready, "is_audio_ready", is_audio_ready, "checked_count", checked_count, "checked_frames_size", checked_frames_size); // Check if working frame is final - if ((!end_of_stream && is_video_ready && is_audio_ready) || end_of_stream || is_seek_trash) - { + if ((!end_of_stream && is_video_ready && is_audio_ready) || end_of_stream || is_seek_trash) { // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (mark frame as final)", "requested_frame", requested_frame, "f->number", f->number, "is_seek_trash", is_seek_trash, "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "end_of_stream", end_of_stream); - if (!is_seek_trash) - { + if (!is_seek_trash) { // Add missing image (if needed - sometimes end_of_stream causes frames with only audio) if (info.has_video && !is_video_ready && last_video_frame) // Copy image from last frame @@ -1837,15 +2253,15 @@ // Add to missing cache (if another frame depends on it) { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); if (missing_video_frames_source.count(f->number)) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (add frame to missing cache)", "f->number", f->number, "is_seek_trash", is_seek_trash, "Missing Cache Count", missing_frames.Count(), "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (add frame to missing cache)", "f->number", f->number, "is_seek_trash", is_seek_trash, "Missing Cache Count", missing_frames.Count(), "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count()); missing_frames.Add(f); } - // Remove from 'checked' count - checked_frames.erase(f->number); + // Remove from 'checked' count + checked_frames.erase(f->number); } // Remove frame from working cache @@ -1858,18 +2274,19 @@ // Seek trash, so delete the frame from the working cache, and never add it to the final cache. working_cache.Remove(f->number); } - } - else + + } else { // Stop looping break; + } } } // Check for the correct frames per second (FPS) value by scanning the 1st few seconds of video packets. 
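// --- Illustrative aside, not part of the patch: CheckFPS() below verifies the
// container's reported frame rate by counting how many video packets fall into
// each of the first few seconds of the stream. A rough sketch of that idea;
// count_packets_in_second() is a hypothetical helper, and the real method also
// decodes each packet and tracks the PTS offset as shown in the hunk below.
static int count_packets_in_second(AVFormatContext *fmt_ctx, int video_stream,
                                   AVRational time_base, int second) {
    int count = 0;
    AVPacket pkt;
    while (av_read_frame(fmt_ctx, &pkt) >= 0) {
        if (pkt.stream_index == video_stream && pkt.pts != AV_NOPTS_VALUE) {
            double t = pkt.pts * av_q2d(time_base);   // PTS -> seconds
            if (t >= second + 1) {                    // past the window, stop
                av_packet_unref(&pkt);
                break;
            }
            if (t >= second)
                count++;
        }
        av_packet_unref(&pkt);
    }
    return count;   // compared against the declared FPS for seconds 1, 2, 3
}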
-void FFmpegReader::CheckFPS() -{ +void FFmpegReader::CheckFPS() { check_fps = true; + int first_second_counter = 0; int second_second_counter = 0; int third_second_counter = 0; @@ -1879,19 +2296,16 @@ int64_t pts = 0; // Loop through the stream - while (true) - { + while (true) { // Get the next packet (if any) if (GetNextPacket() < 0) // Break loop when no more packets found break; // Video packet - if (packet->stream_index == videoStream) - { + if (packet->stream_index == videoStream) { // Check if the AVFrame is finished and set it - if (GetAVFrame()) - { + if (GetAVFrame()) { // Update PTS / Frame Offset (if any) UpdatePTSOffset(true); @@ -1965,36 +2379,37 @@ } } -// Remove AVFrame from cache (and deallocate it's memory) -void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) -{ - // Remove pFrame (if exists) - if (remove_frame) - { - // Free memory - av_freep(&remove_frame->data[0]); +// Remove AVFrame from cache (and deallocate its memory) +void FFmpegReader::RemoveAVFrame(AVFrame *remove_frame) { + // Remove pFrame (if exists) + if (remove_frame) { + // Free memory +#pragma omp critical (packet_cache) + { + av_freep(&remove_frame->data[0]); +#ifndef WIN32 + AV_FREE_FRAME(&remove_frame); +#endif + } } } -// Remove AVPacket from cache (and deallocate it's memory) -void FFmpegReader::RemoveAVPacket(AVPacket* remove_packet) -{ +// Remove AVPacket from cache (and deallocate its memory) +void FFmpegReader::RemoveAVPacket(AVPacket *remove_packet) { // deallocate memory for packet - AV_FREE_PACKET(remove_packet); + AV_FREE_PACKET(remove_packet); // Delete the object delete remove_packet; } /// Get the smallest video frame that is still being processed -int64_t FFmpegReader::GetSmallestVideoFrame() -{ +int64_t FFmpegReader::GetSmallestVideoFrame() { // Loop through frame numbers - map::iterator itr; + std::map::iterator itr; int64_t smallest_frame = -1; - const GenericScopedLock lock(processingCriticalSection); - for(itr = processing_video_frames.begin(); itr != processing_video_frames.end(); ++itr) - { + const GenericScopedLock lock(processingCriticalSection); + for (itr = processing_video_frames.begin(); itr != processing_video_frames.end(); ++itr) { if (itr->first < smallest_frame || smallest_frame == -1) smallest_frame = itr->first; } @@ -2004,14 +2419,12 @@ } /// Get the smallest audio frame that is still being processed -int64_t FFmpegReader::GetSmallestAudioFrame() -{ +int64_t FFmpegReader::GetSmallestAudioFrame() { // Loop through frame numbers - map::iterator itr; + std::map::iterator itr; int64_t smallest_frame = -1; - const GenericScopedLock lock(processingCriticalSection); - for(itr = processing_audio_frames.begin(); itr != processing_audio_frames.end(); ++itr) - { + const GenericScopedLock lock(processingCriticalSection); + for (itr = processing_audio_frames.begin(); itr != processing_audio_frames.end(); ++itr) { if (itr->first < smallest_frame || smallest_frame == -1) smallest_frame = itr->first; } @@ -2021,14 +2434,14 @@ } // Generate JSON string of this object -string FFmpegReader::Json() { +std::string FFmpegReader::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value FFmpegReader::JsonValue() { +// Generate Json::Value for this object +Json::Value FFmpegReader::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties @@ -2040,30 +2453,22 @@ } // Load JSON string into this object -void FFmpegReader::SetJson(string value) { +void 
FFmpegReader::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - - try - { + try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) - { + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void FFmpegReader::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void FFmpegReader::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); @@ -2073,8 +2478,7 @@ path = root["path"].asString(); // Re-Open path, and re-init everything (if needed) - if (is_open) - { + if (is_open) { Close(); Open(); } diff -Nru libopenshot-0.2.2+dfsg1/src/FFmpegWriter.cpp libopenshot-0.2.5+dfsg1/src/FFmpegWriter.cpp --- libopenshot-0.2.2+dfsg1/src/FFmpegWriter.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/FFmpegWriter.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for FFmpegWriter class * @author Jonathan Thomas , Fabrice Bellard * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2013 OpenShot Studios, LLC, Fabrice Bellard + * Copyright (c) 2008-2019 OpenShot Studios, LLC, Fabrice Bellard * (http://www.openshotstudios.com). This file is part of * OpenShot Library (http://www.openshot.org), an open-source project * dedicated to delivering high quality video editing and animation solutions @@ -32,14 +35,61 @@ using namespace openshot; -FFmpegWriter::FFmpegWriter(string path) : +#if HAVE_HW_ACCEL +#pragma message "You are compiling with experimental hardware encode" +#else +#pragma message "You are compiling only with software encode" +#endif + +// Multiplexer parameters temporary storage +AVDictionary *mux_dict = NULL; + +#if HAVE_HW_ACCEL +int hw_en_on = 1; // Is set in UI +int hw_en_supported = 0; // Is set by FFmpegWriter +AVPixelFormat hw_en_av_pix_fmt = AV_PIX_FMT_NONE; +AVHWDeviceType hw_en_av_device_type = AV_HWDEVICE_TYPE_VAAPI; +static AVBufferRef *hw_device_ctx = NULL; +AVFrame *hw_frame = NULL; + +static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int64_t width, int64_t height) +{ + AVBufferRef *hw_frames_ref; + AVHWFramesContext *frames_ctx = NULL; + int err = 0; + + if (!(hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx))) { + fprintf(stderr, "Failed to create HW frame context.\n"); + return -1; + } + frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); + frames_ctx->format = hw_en_av_pix_fmt; + frames_ctx->sw_format = AV_PIX_FMT_NV12; + frames_ctx->width = width; + frames_ctx->height = height; + frames_ctx->initial_pool_size = 20; + if ((err = av_hwframe_ctx_init(hw_frames_ref)) < 0) { + fprintf(stderr, "Failed to initialize HW frame context." 
+ "Error code: %s\n",av_err2str(err)); + av_buffer_unref(&hw_frames_ref); + return err; + } + ctx->hw_frames_ctx = av_buffer_ref(hw_frames_ref); + if (!ctx->hw_frames_ctx) + err = AVERROR(ENOMEM); + + av_buffer_unref(&hw_frames_ref); + return err; +} +#endif // HAVE_HW_ACCEL + +FFmpegWriter::FFmpegWriter(std::string path) : path(path), fmt(NULL), oc(NULL), audio_st(NULL), video_st(NULL), audio_pts(0), video_pts(0), samples(NULL), audio_outbuf(NULL), audio_outbuf_size(0), audio_input_frame_size(0), audio_input_position(0), initial_audio_input_frame_size(0), img_convert_ctx(NULL), cache_size(8), num_of_rescalers(32), rescaler_position(0), video_codec(NULL), audio_codec(NULL), is_writing(false), write_video_count(0), write_audio_count(0), original_sample_rate(0), original_channels(0), avr(NULL), avr_planar(NULL), is_open(false), prepare_streams(false), - write_header(false), write_trailer(false), audio_encoder_buffer_size(0), audio_encoder_buffer(NULL) -{ + write_header(false), write_trailer(false), audio_encoder_buffer_size(0), audio_encoder_buffer(NULL) { // Disable audio & video (so they can be independently enabled) info.has_audio = false; @@ -53,23 +103,29 @@ } // Open the writer -void FFmpegWriter::Open() -{ - // Open the writer - is_open = true; - - // Prepare streams (if needed) - if (!prepare_streams) - PrepareStreams(); - - // Write header (if needed) - if (!write_header) - WriteHeader(); +void FFmpegWriter::Open() { + if (!is_open) { + // Open the writer + is_open = true; + + // Prepare streams (if needed) + if (!prepare_streams) + PrepareStreams(); + + // Now that all the parameters are set, we can open the audio and video codecs and allocate the necessary encode buffers + if (info.has_video && video_st) + open_video(oc, video_st); + if (info.has_audio && audio_st) + open_audio(oc, audio_st); + + // Write header (if needed) + if (!write_header) + WriteHeader(); + } } // auto detect format (from path) -void FFmpegWriter::auto_detect_format() -{ +void FFmpegWriter::auto_detect_format() { // Auto detect the output format from the name. default is mpeg. 
fmt = av_guess_format(NULL, path.c_str(), NULL); if (!fmt) @@ -94,9 +150,8 @@ } // initialize streams -void FFmpegWriter::initialize_streams() -{ - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::initialize_streams", "fmt->video_codec", fmt->video_codec, "fmt->audio_codec", fmt->audio_codec, "AV_CODEC_ID_NONE", AV_CODEC_ID_NONE, "", -1, "", -1, "", -1); +void FFmpegWriter::initialize_streams() { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::initialize_streams", "fmt->video_codec", fmt->video_codec, "fmt->audio_codec", fmt->audio_codec, "AV_CODEC_ID_NONE", AV_CODEC_ID_NONE); // Add the audio and video streams using the default format codecs and initialize the codecs video_st = NULL; @@ -111,12 +166,66 @@ } // Set video export options -void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, int width, int height, Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate) -{ +void FFmpegWriter::SetVideoOptions(bool has_video, std::string codec, Fraction fps, int width, int height, Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate) { // Set the video options - if (codec.length() > 0) - { - AVCodec *new_codec = avcodec_find_encoder_by_name(codec.c_str()); + if (codec.length() > 0) { + AVCodec *new_codec; + // Check if the codec selected is a hardware accelerated codec +#if HAVE_HW_ACCEL +#if defined(__linux__) + if (strstr(codec.c_str(), "_vaapi") != NULL) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_VAAPI; + hw_en_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + } else if (strstr(codec.c_str(), "_nvenc") != NULL) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_CUDA; + hw_en_av_device_type = AV_HWDEVICE_TYPE_CUDA; + } else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } +#elif defined(_WIN32) + if (strstr(codec.c_str(), "_dxva2") != NULL) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_DXVA2_VLD; + hw_en_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + } else if (strstr(codec.c_str(), "_nvenc") != NULL) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_CUDA; + hw_en_av_device_type = AV_HWDEVICE_TYPE_CUDA; + } else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } +#elif defined(__APPLE__) + if (strstr(codec.c_str(), "_videotoolbox") != NULL) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_VIDEOTOOLBOX; + hw_en_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + } else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } + #else // is FFmpeg 3 but not linux + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + #endif //__linux__ +#else // not ffmpeg 3 + new_codec = avcodec_find_encoder_by_name(codec.c_str()); +#endif // HAVE_HW_ACCEL if (new_codec == NULL) throw InvalidCodec("A valid video codec could not be found for this file.", path); else { @@ -127,8 +236,7 @@ fmt->video_codec = new_codec->id; } } - if (fps.num > 0) - { + if (fps.num > 0) { // Set frames per second (if provided) info.fps.num = fps.num; info.fps.den = fps.den; @@ -141,12 +249,13 
@@ info.width = width; if (height >= 1) info.height = height; - if (pixel_ratio.num > 0) - { + if (pixel_ratio.num > 0) { info.pixel_ratio.num = pixel_ratio.num; info.pixel_ratio.den = pixel_ratio.den; } - if (bit_rate >= 1000) + if (bit_rate >= 1000) // bit_rate is the bitrate in b/s + info.video_bit_rate = bit_rate; + if ((bit_rate >= 0) && (bit_rate < 64)) // bit_rate is the bitrate in crf info.video_bit_rate = bit_rate; info.interlaced_frame = interlaced; @@ -168,17 +277,22 @@ info.has_video = has_video; } +// Set video export options (overloaded function) +void FFmpegWriter::SetVideoOptions(std::string codec, int width, int height, Fraction fps, int bit_rate) { + // Call full signature with some default parameters + FFmpegWriter::SetVideoOptions(true, codec, fps, width, height, + openshot::Fraction(1, 1), false, true, bit_rate); +} + + // Set audio export options -void FFmpegWriter::SetAudioOptions(bool has_audio, string codec, int sample_rate, int channels, ChannelLayout channel_layout, int bit_rate) -{ +void FFmpegWriter::SetAudioOptions(bool has_audio, std::string codec, int sample_rate, int channels, ChannelLayout channel_layout, int bit_rate) { // Set audio options - if (codec.length() > 0) - { + if (codec.length() > 0) { AVCodec *new_codec = avcodec_find_encoder_by_name(codec.c_str()); if (new_codec == NULL) throw InvalidCodec("A valid audio codec could not be found for this file.", path); - else - { + else { // Set audio codec info.acodec = new_codec->name; @@ -200,31 +314,37 @@ if (original_channels == 0) original_channels = info.channels; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetAudioOptions (" + codec + ")", "sample_rate", sample_rate, "channels", channels, "bit_rate", bit_rate, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetAudioOptions (" + codec + ")", "sample_rate", sample_rate, "channels", channels, "bit_rate", bit_rate); // Enable / Disable audio info.has_audio = has_audio; } + +// Set audio export options (overloaded function) +void FFmpegWriter::SetAudioOptions(std::string codec, int sample_rate, int bit_rate) { + // Call full signature with some default parameters + FFmpegWriter::SetAudioOptions(true, codec, sample_rate, 2, + openshot::LAYOUT_STEREO, bit_rate); +} + + // Set custom options (some codecs accept additional params) -void FFmpegWriter::SetOption(StreamType stream, string name, string value) -{ +void FFmpegWriter::SetOption(StreamType stream, std::string name, std::string value) { // Declare codec context AVCodecContext *c = NULL; AVStream *st = NULL; - stringstream convert(value); + std::stringstream convert(value); if (info.has_video && stream == VIDEO_STREAM && video_st) { st = video_st; // Get codec context c = AV_GET_CODEC_PAR_CONTEXT(st, video_codec); - } - else if (info.has_audio && stream == AUDIO_STREAM && audio_st) { + } else if (info.has_audio && stream == AUDIO_STREAM && audio_st) { st = audio_st; // Get codec context c = AV_GET_CODEC_PAR_CONTEXT(st, audio_codec); - } - else + } else throw NoStreamsFound("The stream was not found. Be sure to call PrepareStreams() first.", path); // Init AVOption @@ -237,8 +357,8 @@ // Was option found? 
if (option || (name == "g" || name == "qmin" || name == "qmax" || name == "max_b_frames" || name == "mb_decision" || - name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate")) - { + name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate" || + name == "rc_buffer_size" || name == "crf" || name == "cqp")) { // Check for specific named options if (name == "g") // Set gop_size @@ -284,20 +404,149 @@ // Buffer size convert >> c->rc_buffer_size; - else + else if (name == "cqp") { + // encode quality and special settings like lossless + // This might be better in an extra methods as more options + // and way to set quality are possible + #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + #if HAVE_HW_ACCEL + if (hw_en_on) { + av_opt_set_int(c->priv_data, "qp", std::min(std::stoi(value),63), 0); // 0-63 + } else + #endif // HAVE_HW_ACCEL + { + switch (c->codec_id) { + #if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + c->bit_rate = 0; + av_opt_set_int(c->priv_data, "qp", std::min(std::stoi(value),63), 0); // 0-63 + break; + #endif + case AV_CODEC_ID_VP8 : + c->bit_rate = 10000000; + av_opt_set_int(c->priv_data, "qp", std::max(std::min(std::stoi(value), 63), 4), 0); // 4-63 + break; + case AV_CODEC_ID_VP9 : + c->bit_rate = 0; // Must be zero! + av_opt_set_int(c->priv_data, "qp", std::min(std::stoi(value), 63), 0); // 0-63 + if (std::stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "qp", std::min(std::stoi(value), 51), 0); // 0-51 + if (std::stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + } + break; + case AV_CODEC_ID_HEVC : + av_opt_set_int(c->priv_data, "qp", std::min(std::stoi(value), 51), 0); // 0-51 + if (std::stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + default: + // For all other codecs assume a range of 0-63 + av_opt_set_int(c->priv_data, "qp", std::min(std::stoi(value), 63), 0); // 0-63 + c->bit_rate = 0; + } + } + #endif + } else if (name == "crf") { + // encode quality and special settings like lossless + // This might be better in an extra methods as more options + // and way to set quality are possible +#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) +#if HAVE_HW_ACCEL + if (hw_en_on) { + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380000.0; + } + else { + mbs *= std::pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } else +#endif // HAVE_HW_ACCEL + { + switch (c->codec_id) { +#if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + c->bit_rate = 0; + av_opt_set_int(c->priv_data, "crf", std::min(std::stoi(value),63), 0); + break; +#endif + case AV_CODEC_ID_VP8 : + c->bit_rate = 10000000; + av_opt_set_int(c->priv_data, "crf", std::max(std::min(std::stoi(value), 63), 4), 0); // 4-63 + break; + case AV_CODEC_ID_VP9 : + c->bit_rate = 0; // Must be zero! 
+ av_opt_set_int(c->priv_data, "crf", std::min(std::stoi(value), 63), 0); // 0-63 + if (std::stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "crf", std::min(std::stoi(value), 51), 0); // 0-51 + if (std::stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + } + break; + case AV_CODEC_ID_HEVC : + av_opt_set_int(c->priv_data, "crf", std::min(std::stoi(value), 51), 0); // 0-51 + if (std::stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + default: + // If this codec doesn't support crf calculate a bitrate + // TODO: find better formula + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380000.0; + } else { + mbs *= std::pow(0.912, info.video_bit_rate); + } + } + c->bit_rate = (int) (mbs); + } + } +#endif + } else { // Set AVOption AV_OPTION_SET(st, c->priv_data, name.c_str(), value.c_str(), c); + } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetOption (" + (string)name + ")", "stream == VIDEO_STREAM", stream == VIDEO_STREAM, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetOption (" + (std::string)name + ")", "stream == VIDEO_STREAM", stream == VIDEO_STREAM); - } - else + // Muxing dictionary is not part of the codec context. + // Just reusing SetOption function to set popular multiplexing presets. + } else if (name == "muxing_preset") { + if (value == "mp4_faststart") { + // 'moov' box to the beginning; only for MOV, MP4 + av_dict_set(&mux_dict, "movflags", "faststart", 0); + } else if (value == "mp4_fragmented") { + // write selfcontained fragmented file, minimum length of the fragment 8 sec; only for MOV, MP4 + av_dict_set(&mux_dict, "movflags", "frag_keyframe", 0); + av_dict_set(&mux_dict, "min_frag_duration", "8000000", 0); + } + } else { throw InvalidOptions("The option is not valid for this codec.", path); + } } /// Determine if codec name is valid -bool FFmpegWriter::IsValidCodec(string codec_name) { +bool FFmpegWriter::IsValidCodec(std::string codec_name) { // Initialize FFMpeg, and register all formats and codecs AV_REGISTER_ALL @@ -309,29 +558,21 @@ } // Prepare & initialize streams and open codecs -void FFmpegWriter::PrepareStreams() -{ +void FFmpegWriter::PrepareStreams() { if (!info.has_audio && !info.has_video) throw InvalidOptions("No video or audio options have been set. You must set has_video or has_audio (or both).", path); - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::PrepareStreams [" + path + "]", "info.has_audio", info.has_audio, "info.has_video", info.has_video, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::PrepareStreams [" + path + "]", "info.has_audio", info.has_audio, "info.has_video", info.has_video); // Initialize the streams (i.e. 
add the streams) initialize_streams(); - // Now that all the parameters are set, we can open the audio and video codecs and allocate the necessary encode buffers - if (info.has_video && video_st) - open_video(oc, video_st); - if (info.has_audio && audio_st) - open_audio(oc, audio_st); - // Mark as 'prepared' prepare_streams = true; } // Write the file header (after the options are set) -void FFmpegWriter::WriteHeader() -{ +void FFmpegWriter::WriteHeader() { if (!info.has_audio && !info.has_video) throw InvalidOptions("No video or audio options have been set. You must set has_video or has_audio (or both).", path); @@ -341,31 +582,41 @@ throw InvalidFile("Could not open or write file.", path); } - // Force the output filename (which doesn't always happen for some reason) - snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", path.c_str()); - - // Write the stream header, if any - // TODO: add avoptions / parameters instead of NULL + // Force the output filename (which doesn't always happen for some reason) + AV_SET_FILENAME(oc, path.c_str()); // Add general metadata (if any) - for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) - { + for (std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) { av_dict_set(&oc->metadata, iter->first.c_str(), iter->second.c_str(), 0); } - if (avformat_write_header(oc, NULL) != 0) { - throw InvalidFile("Could not write header to file.", path); + // Set multiplexing parameters + AVDictionary *dict = NULL; + + bool is_mp4 = strcmp(oc->oformat->name, "mp4"); + bool is_mov = strcmp(oc->oformat->name, "mov"); + // Set dictionary preset only for MP4 and MOV files + if (is_mp4 || is_mov) + av_dict_copy(&dict, mux_dict, 0); + + // Write the stream header + if (avformat_write_header(oc, &dict) != 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteHeader (avformat_write_header)"); + throw InvalidFile("Could not write header to file.", path); }; + // Free multiplexing dictionaries sets + if (dict) av_dict_free(&dict); + if (mux_dict) av_dict_free(&mux_dict); + // Mark as 'written' write_header = true; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteHeader", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteHeader"); } // Add a frame to the queue waiting to be encoded. -void FFmpegWriter::WriteFrame(std::shared_ptr frame) -{ +void FFmpegWriter::WriteFrame(std::shared_ptr frame) { // Check for open reader (or throw exception) if (!is_open) throw WriterClosed("The FFmpegWriter is closed. 
Call Open() before calling this method.", path); @@ -378,18 +629,16 @@ if (info.has_audio && audio_st) spooled_audio_frames.push_back(frame); - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame", "frame->number", frame->number, "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "cache_size", cache_size, "is_writing", is_writing, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame", "frame->number", frame->number, "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "cache_size", cache_size, "is_writing", is_writing); // Write the frames once it reaches the correct cache size - if (spooled_video_frames.size() == cache_size || spooled_audio_frames.size() == cache_size) - { + if ((int)spooled_video_frames.size() == cache_size || (int)spooled_audio_frames.size() == cache_size) { // Is writer currently writing? if (!is_writing) // Write frames to video file write_queued_frames(); - else - { + else { // Write frames to video file write_queued_frames(); } @@ -400,9 +649,8 @@ } // Write all frames in the queue to the video file. -void FFmpegWriter::write_queued_frames() -{ - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_queued_frames", "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "", -1, "", -1, "", -1, "", -1); +void FFmpegWriter::write_queued_frames() { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_queued_frames", "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size()); // Flip writing flag is_writing = true; @@ -423,17 +671,16 @@ // Create blank exception bool has_error_encoding_video = false; - #pragma omp parallel +#pragma omp parallel { - #pragma omp single +#pragma omp single { // Process all audio frames (in a separate thread) if (info.has_audio && audio_st && !queued_audio_frames.empty()) write_audio_packets(false); // Loop through each queued image frame - while (!queued_video_frames.empty()) - { + while (!queued_video_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = queued_video_frames.front(); @@ -450,22 +697,19 @@ } // end while } // end omp single - #pragma omp single +#pragma omp single { // Loop back through the frames (in order), and write them to the video file - while (!processed_frames.empty()) - { + while (!processed_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = processed_frames.front(); - if (info.has_video && video_st) - { + if (info.has_video && video_st) { // Add to deallocate queue (so we can remove the AVFrames when we are done) deallocate_frames.push_back(frame); // Does this frame's AVFrame still exist - if (av_frames.count(frame)) - { + if (av_frames.count(frame)) { // Get AVFrame AVFrame *frame_final = av_frames[frame]; @@ -481,14 +725,12 @@ } // Loop through, and deallocate AVFrames - while (!deallocate_frames.empty()) - { + while (!deallocate_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = deallocate_frames.front(); // Does this frame's AVFrame still exist - if (av_frames.count(frame)) - { + if (av_frames.count(frame)) { // Get AVFrame AVFrame *av_frame = av_frames[frame]; @@ -506,6 +748,7 @@ is_writing = false; } // end omp single + } // end omp parallel // Raise exception from main thread @@ -514,13 +757,11 @@ } // Write a block 
of frames from a reader -void FFmpegWriter::WriteFrame(ReaderBase* reader, int64_t start, int64_t length) -{ - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame (from Reader)", "start", start, "length", length, "", -1, "", -1, "", -1, "", -1); +void FFmpegWriter::WriteFrame(ReaderBase *reader, int64_t start, int64_t length) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame (from Reader)", "start", start, "length", length); // Loop through each frame (and encoded it) - for (int64_t number = start; number <= length; number++) - { + for (int64_t number = start; number <= length; number++) { // Get the frame std::shared_ptr f = reader->GetFrame(number); @@ -530,8 +771,7 @@ } // Write the file trailer (after all frames are written) -void FFmpegWriter::WriteTrailer() -{ +void FFmpegWriter::WriteTrailer() { // Write any remaining queued frames to video file write_queued_frames(); @@ -551,12 +791,11 @@ // Mark as 'written' write_trailer = true; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteTrailer", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteTrailer"); } // Flush encoders -void FFmpegWriter::flush_encoders() -{ +void FFmpegWriter::flush_encoders() { if (info.has_audio && audio_codec && AV_GET_CODEC_TYPE(audio_st) == AVMEDIA_TYPE_AUDIO && AV_GET_CODEC_ATTRIBUTES(audio_st, audio_codec)->frame_size <= 1) return; #if (LIBAVFORMAT_VERSION_MAJOR < 58) @@ -564,15 +803,15 @@ return; #endif - int error_code = 0; - int stop_encoding = 1; + int error_code = 0; + int stop_encoding = 1; - // FLUSH VIDEO ENCODER - if (info.has_video) + // FLUSH VIDEO ENCODER + if (info.has_video) for (;;) { // Increment PTS (in frames and scaled to the codec's timebase) - write_video_count += av_rescale_q(1, (AVRational){info.fps.den, info.fps.num}, video_codec->time_base); + write_video_count += av_rescale_q(1, (AVRational) {info.fps.den, info.fps.num}, video_codec->time_base); AVPacket pkt; av_init_packet(&pkt); @@ -586,7 +825,7 @@ int got_packet = 0; int error_code = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 #pragma omp critical (write_video_packet) { // Encode video packet (latest version of FFmpeg) @@ -610,34 +849,34 @@ error_code = av_interleaved_write_frame(oc, &pkt); } } - #else +#else // IS_FFMPEG_3_2 - #if LIBAVFORMAT_VERSION_MAJOR >= 54 - // Encode video packet (older than FFmpeg 3.2) - error_code = avcodec_encode_video2(video_codec, &pkt, NULL, &got_packet); - - #else - // Encode video packet (even older version of FFmpeg) - int video_outbuf_size = 0; - - /* encode the image */ - int out_size = avcodec_encode_video(video_codec, NULL, video_outbuf_size, NULL); - - /* if zero size, it means the image was buffered */ - if (out_size > 0) { - if(video_codec->coded_frame->key_frame) - pkt.flags |= AV_PKT_FLAG_KEY; - pkt.data= video_outbuf; - pkt.size= out_size; +#if LIBAVFORMAT_VERSION_MAJOR >= 54 + // Encode video packet (older than FFmpeg 3.2) + error_code = avcodec_encode_video2(video_codec, &pkt, NULL, &got_packet); - // got data back (so encode this frame) - got_packet = 1; - } - #endif - #endif +#else + // Encode video packet (even older version of FFmpeg) + int video_outbuf_size = 0; + + /* encode the image */ + int out_size = avcodec_encode_video(video_codec, NULL, video_outbuf_size, NULL); + + /* if zero size, it means the image was buffered */ + if (out_size > 0) { + if(video_codec->coded_frame->key_frame) + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.data= video_outbuf; + pkt.size= out_size; + + 
// got data back (so encode this frame) + got_packet = 1; + } +#endif // LIBAVFORMAT_VERSION_MAJOR >= 54 +#endif // IS_FFMPEG_3_2 if (error_code < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (std::string) av_err2str(error_code) + "]", "error_code", error_code); } if (!got_packet) { stop_encoding = 1; @@ -659,7 +898,7 @@ // Write packet error_code = av_interleaved_write_frame(oc, &pkt); if (error_code < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (std::string)av_err2str(error_code) + "]", "error_code", error_code); } // Deallocate memory (if needed) @@ -667,8 +906,8 @@ av_freep(&video_outbuf); } - // FLUSH AUDIO ENCODER - if (info.has_audio) + // FLUSH AUDIO ENCODER + if (info.has_audio) for (;;) { // Increment PTS (in samples and scaled to the codec's timebase) @@ -687,14 +926,14 @@ /* encode the image */ int got_packet = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 avcodec_send_frame(audio_codec, NULL); got_packet = 0; - #else +#else error_code = avcodec_encode_audio2(audio_codec, &pkt, NULL, &got_packet); - #endif +#endif if (error_code < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (std::string)av_err2str(error_code) + "]", "error_code", error_code); } if (!got_packet) { stop_encoding = 1; @@ -720,7 +959,7 @@ // Write packet error_code = av_interleaved_write_frame(oc, &pkt); if (error_code < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (std::string)av_err2str(error_code) + "]", "error_code", error_code); } // deallocate memory for packet @@ -733,16 +972,19 @@ // Close the video codec void FFmpegWriter::close_video(AVFormatContext *oc, AVStream *st) { - AV_FREE_CONTEXT(video_codec); - video_codec = NULL; +#if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } +#endif // HAVE_HW_ACCEL } // Close the audio codec void FFmpegWriter::close_audio(AVFormatContext *oc, AVStream *st) { - AV_FREE_CONTEXT(audio_codec); - audio_codec = NULL; - // Clear buffers delete[] samples; delete[] audio_outbuf; @@ -766,8 +1008,7 @@ } // Close the writer -void FFmpegWriter::Close() -{ +void FFmpegWriter::Close() { // Write trailer (if needed) if (!write_trailer) WriteTrailer(); @@ -782,12 +1023,6 @@ if (image_rescalers.size() > 0) RemoveScalers(); - // Free the streams - for (int i = 0; i < oc->nb_streams; i++) { - av_freep(AV_GET_CODEC_ATTRIBUTES(&oc->streams[i], &oc->streams[i])); - av_freep(&oc->streams[i]); - } - if (!(fmt->flags & AVFMT_NOFILE)) { /* close the output file */ avio_close(oc->pb); @@ -797,8 +1032,9 @@ write_video_count = 0; write_audio_count = 0; - // Free the context - av_freep(&oc); + // 
Free the context which frees the streams too + avformat_free_context(oc); + oc = NULL; // Close writer is_open = false; @@ -806,28 +1042,23 @@ write_header = false; write_trailer = false; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::Close"); } // Add an AVFrame to the cache -void FFmpegWriter::add_avframe(std::shared_ptr frame, AVFrame* av_frame) -{ +void FFmpegWriter::add_avframe(std::shared_ptr frame, AVFrame *av_frame) { // Add AVFrame to map (if it does not already exist) - if (!av_frames.count(frame)) - { + if (!av_frames.count(frame)) { // Add av_frame av_frames[frame] = av_frame; - } - else - { + } else { // Do not add, and deallocate this AVFrame AV_FREE_FRAME(&av_frame); } } // Add an audio output stream -AVStream* FFmpegWriter::add_audio_stream() -{ +AVStream *FFmpegWriter::add_audio_stream() { AVCodecContext *c; AVStream *st; @@ -853,27 +1084,25 @@ // Set valid sample rate (or throw error) if (codec->supported_samplerates) { int i; - for (i = 0; codec->supported_samplerates[i] != 0; i++) - if (info.sample_rate == codec->supported_samplerates[i]) - { - // Set the valid sample rate - c->sample_rate = info.sample_rate; - break; - } - if (codec->supported_samplerates[i] == 0) - throw InvalidSampleRate("An invalid sample rate was detected for this codec.", path); + for (i = 0; codec->supported_samplerates[i] != 0; i++) + if (info.sample_rate == codec->supported_samplerates[i]) { + // Set the valid sample rate + c->sample_rate = info.sample_rate; + break; + } + if (codec->supported_samplerates[i] == 0) + throw InvalidSampleRate("An invalid sample rate was detected for this codec.", path); } else // Set sample rate c->sample_rate = info.sample_rate; // Set a valid number of channels (or throw error) - int channel_layout = info.channel_layout; + const uint64_t channel_layout = info.channel_layout; if (codec->channel_layouts) { int i; for (i = 0; codec->channel_layouts[i] != 0; i++) - if (channel_layout == codec->channel_layouts[i]) - { + if (channel_layout == codec->channel_layouts[i]) { // Set valid channel layout c->channel_layout = channel_layout; break; @@ -886,8 +1115,7 @@ // Choose a valid sample_fmt if (codec->sample_fmts) { - for (int i = 0; codec->sample_fmts[i] != AV_SAMPLE_FMT_NONE; i++) - { + for (int i = 0; codec->sample_fmts[i] != AV_SAMPLE_FMT_NONE; i++) { // Set sample format to 1st valid format (and then exit loop) c->sample_fmt = codec->sample_fmts[i]; break; @@ -913,8 +1141,7 @@ } // Add a video output stream -AVStream* FFmpegWriter::add_video_stream() -{ +AVStream *FFmpegWriter::add_video_stream() { AVCodecContext *c; AVStream *st; @@ -934,17 +1161,48 @@ #endif /* Init video encoder options */ - c->bit_rate = info.video_bit_rate; + if (info.video_bit_rate >= 1000) { + c->bit_rate = info.video_bit_rate; + if (info.video_bit_rate >= 1500000) { + c->qmin = 2; + c->qmax = 30; + } + // Here should be the setting for low fixed bitrate + // Defaults are used because mpeg2 otherwise had problems + } else { + // Check if codec supports crf + switch (c->codec_id) { +#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) +#if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : +#endif + case AV_CODEC_ID_VP9 : + case AV_CODEC_ID_HEVC : +#endif + case AV_CODEC_ID_VP8 : + case AV_CODEC_ID_H264 : + if (info.video_bit_rate < 40) { + c->qmin = 0; + c->qmax = 63; + } else { + c->qmin = info.video_bit_rate - 5; + c->qmax = 63; + } + break; + default: 
+ // Here should be the setting for codecs that don't support crf + // For now defaults are used + break; + } + } - //TODO: Implement variable bitrate feature (which actually works). This implementation throws +//TODO: Implement variable bitrate feature (which actually works). This implementation throws //invalid bitrate errors and rc buffer underflow errors, etc... //c->rc_min_rate = info.video_bit_rate; //c->rc_max_rate = info.video_bit_rate; //c->rc_buffer_size = FFMAX(c->rc_max_rate, 15000000) * 112L / 15000000 * 16384; //if ( !c->rc_initial_buffer_occupancy ) // c->rc_initial_buffer_occupancy = c->rc_buffer_size * 3/4; - c->qmin = 2; - c->qmax = 30; /* resolution must be a multiple of two */ // TODO: require /2 height and width @@ -957,9 +1215,10 @@ identically 1. */ c->time_base.num = info.video_timebase.num; c->time_base.den = info.video_timebase.den; - #if LIBAVFORMAT_VERSION_MAJOR >= 56 +// AVCodecContext->framerate was added in FFmpeg 2.2 +#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(56, 26, 0) c->framerate = av_inv_q(c->time_base); - #endif +#endif st->avg_frame_rate = av_inv_q(c->time_base); st->time_base.num = info.video_timebase.num; st->time_base.den = info.video_timebase.den; @@ -983,50 +1242,49 @@ #endif // Find all supported pixel formats for this codec - const PixelFormat* supported_pixel_formats = codec->pix_fmts; - while (supported_pixel_formats != NULL && *supported_pixel_formats != PIX_FMT_NONE) { - // Assign the 1st valid pixel format (if one is missing) - if (c->pix_fmt == PIX_FMT_NONE) - c->pix_fmt = *supported_pixel_formats; - ++supported_pixel_formats; - } - - // Codec doesn't have any pix formats? - if (c->pix_fmt == PIX_FMT_NONE) { - if(fmt->video_codec == AV_CODEC_ID_RAWVIDEO) { - // Raw video should use RGB24 - c->pix_fmt = PIX_FMT_RGB24; + const PixelFormat *supported_pixel_formats = codec->pix_fmts; + while (supported_pixel_formats != NULL && *supported_pixel_formats != PIX_FMT_NONE) { + // Assign the 1st valid pixel format (if one is missing) + if (c->pix_fmt == PIX_FMT_NONE) + c->pix_fmt = *supported_pixel_formats; + ++supported_pixel_formats; + } + + // Codec doesn't have any pix formats? 
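The hunks above fold quality-based ("crf"/"cqp") encoding into the single bit-rate parameter: values of 1000 and above are still treated as bits per second, while small values become a quality level that drives qmin/qmax and, for codecs without a native CRF option, an approximated bitrate. A standalone sketch of that mapping (the thresholds, the qmin/qmax rule and the 0.912 falloff are copied from the hunks above; the function names and main() are illustrative only):

#include <cmath>
#include <cstdio>

// qmin/qmax chosen in add_video_stream() for CRF-capable codecs
static void quality_to_qrange(int crf, int *qmin, int *qmax) {
    *qmax = 63;
    *qmin = (crf < 40) ? 0 : crf - 5;
}

// Fallback bitrate used by the "crf" option for codecs without native CRF support
static long crf_to_bitrate(int crf) {
    double mbs = 15000000.0;            // ~15 Mb/s at CRF 0
    if (crf > 42)
        mbs = 380000.0;                 // floor for very low quality
    else if (crf > 0)
        mbs *= std::pow(0.912, crf);    // roughly -8.8% per CRF step
    return (long) mbs;
}

int main() {
    const int levels[] = {0, 23, 30, 42, 51};
    for (int crf : levels) {
        int qmin, qmax;
        quality_to_qrange(crf, &qmin, &qmax);
        std::printf("crf %2d -> qmin %2d, qmax %2d, fallback %ld b/s\n",
                    crf, qmin, qmax, crf_to_bitrate(crf));
    }
    return 0;
}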
+ if (c->pix_fmt == PIX_FMT_NONE) { + if (fmt->video_codec == AV_CODEC_ID_RAWVIDEO) { + // Raw video should use RGB24 + c->pix_fmt = PIX_FMT_RGB24; #if (LIBAVFORMAT_VERSION_MAJOR < 58) - if (strcmp(fmt->name, "gif") != 0) - // If not GIF format, skip the encoding process - // Set raw picture flag (so we don't encode this video) - oc->oformat->flags |= AVFMT_RAWPICTURE; + if (strcmp(fmt->name, "gif") != 0) + // If not GIF format, skip the encoding process + // Set raw picture flag (so we don't encode this video) + oc->oformat->flags |= AVFMT_RAWPICTURE; #endif - } else { - // Set the default codec - c->pix_fmt = PIX_FMT_YUV420P; - } - } + } else { + // Set the default codec + c->pix_fmt = PIX_FMT_YUV420P; + } + } AV_COPY_PARAMS_FROM_CONTEXT(st, c); #if (LIBAVFORMAT_VERSION_MAJOR < 58) - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "AVFMT_RAWPICTURE", AVFMT_RAWPICTURE, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (std::string)fmt->name + " : " + (std::string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "AVFMT_RAWPICTURE", AVFMT_RAWPICTURE); #else - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (std::string)fmt->name + " : " + (std::string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags); #endif return st; } // open audio codec -void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) -{ +void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) { AVCodec *codec; AV_GET_CODEC_FROM_STREAM(st, audio_codec) // Set number of threads equal to number of processors (not to exceed 16) - audio_codec->thread_count = min(FF_NUM_PROCESSORS, 16); + audio_codec->thread_count = std::min(FF_NUM_PROCESSORS, 16); // Find the audio encoder codec = avcodec_find_encoder_by_name(info.acodec.c_str()); @@ -1041,7 +1299,7 @@ // Open the codec if (avcodec_open2(audio_codec, codec, &opts) < 0) - throw InvalidCodec("Could not open codec", path); + throw InvalidCodec("Could not open audio codec", path); AV_COPY_PARAMS_FROM_CONTEXT(st, audio_codec); // Free options @@ -1055,14 +1313,14 @@ int s = AV_FIND_DECODER_CODEC_ID(st); switch (s) { - case AV_CODEC_ID_PCM_S16LE: - case AV_CODEC_ID_PCM_S16BE: - case AV_CODEC_ID_PCM_U16LE: - case AV_CODEC_ID_PCM_U16BE: - audio_input_frame_size >>= 1; - break; - default: - break; + case AV_CODEC_ID_PCM_S16LE: + case AV_CODEC_ID_PCM_S16BE: + case AV_CODEC_ID_PCM_U16LE: + case AV_CODEC_ID_PCM_U16BE: + audio_input_frame_size >>= 1; + break; + default: + break; } } else { // Set frame size based on the codec @@ -1084,23 +1342,65 @@ audio_encoder_buffer = new uint8_t[audio_encoder_buffer_size]; // Add audio metadata (if any) - for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) - { + for (std::map::iterator iter = 
info.metadata.begin(); iter != info.metadata.end(); ++iter) { av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); - + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE); } // open video codec -void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) -{ +void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) { AVCodec *codec; AV_GET_CODEC_FROM_STREAM(st, video_codec) // Set number of threads equal to number of processors (not to exceed 16) - video_codec->thread_count = min(FF_NUM_PROCESSORS, 16); + video_codec->thread_count = std::min(FF_NUM_PROCESSORS, 16); + +#if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + //char *dev_hw = NULL; + char adapter[256]; + char *adapter_ptr = NULL; + int adapter_num; + // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set + adapter_num = openshot::Settings::Instance()->HW_EN_DEVICE_SET; + fprintf(stderr, "\n\nEncodiing Device Nr: %d\n", adapter_num); + if (adapter_num < 3 && adapter_num >=0) { +#if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + // Maybe 127 is better because the first card would be 1?! + adapter_ptr = adapter; +#elif defined(_WIN32) + adapter_ptr = NULL; +#elif defined(__APPLE__) + adapter_ptr = NULL; +#endif + } + else { + adapter_ptr = NULL; // Just to be sure + } +// Check if it is there and writable +#if defined(__linux__) + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == 0 ) { +#elif defined(_WIN32) + if( adapter_ptr != NULL ) { +#elif defined(__APPLE__) + if( adapter_ptr != NULL ) { +#endif + ZmqLogger::Instance()->AppendDebugMethod("Encode Device present using device", "adapter", adapter_num); + } + else { + adapter_ptr = NULL; // use default + ZmqLogger::Instance()->AppendDebugMethod("Encode Device not present using default"); + } + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, + adapter_ptr, NULL, 0) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video : Codec name: ", info.vcodec.c_str(), -1, " ERROR creating\n", -1); + throw InvalidCodec("Could not create hwdevice", path); + } + } +#endif // HAVE_HW_ACCEL /* find the video encoder */ codec = avcodec_find_encoder_by_name(info.vcodec.c_str()); @@ -1109,36 +1409,85 @@ if (!codec) throw InvalidCodec("Could not find codec", path); - /* Force max_b_frames to 0 in some cases (i.e. for mjpeg image sequences */ - if(video_codec->max_b_frames && video_codec->codec_id != AV_CODEC_ID_MPEG4 && video_codec->codec_id != AV_CODEC_ID_MPEG1VIDEO && video_codec->codec_id != AV_CODEC_ID_MPEG2VIDEO) - video_codec->max_b_frames = 0; + /* Force max_b_frames to 0 in some cases (i.e. 
for mjpeg image sequences */ + if (video_codec->max_b_frames && video_codec->codec_id != AV_CODEC_ID_MPEG4 && video_codec->codec_id != AV_CODEC_ID_MPEG1VIDEO && video_codec->codec_id != AV_CODEC_ID_MPEG2VIDEO) + video_codec->max_b_frames = 0; // Init options AVDictionary *opts = NULL; av_dict_set(&opts, "strict", "experimental", 0); +#if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + video_codec->pix_fmt = hw_en_av_pix_fmt; + + // for the list of possible options, see the list of codec-specific options: + // e.g. ffmpeg -h encoder=h264_vaapi or ffmpeg -h encoder=hevc_vaapi + // and "man ffmpeg-codecs" + + // For VAAPI, it is safer to explicitly set rc_mode instead of relying on auto-selection + // which is ffmpeg version-specific. + if (hw_en_av_pix_fmt == AV_PIX_FMT_VAAPI) { + int64_t qp; + if (av_opt_get_int(video_codec->priv_data, "qp", 0, &qp) != 0 || qp == 0) { + // unless "qp" was set for CQP, switch to VBR RC mode + av_opt_set(video_codec->priv_data, "rc_mode", "VBR", 0); + + // In the current state (ffmpeg-4.2-4 libva-mesa-driver-19.1.5-1) to use VBR, + // one has to specify both bit_rate and maxrate, otherwise a small low quality file is generated on Intel iGPU). + video_codec->rc_max_rate = video_codec->bit_rate; + } + } + + switch (video_codec->codec_id) { + case AV_CODEC_ID_H264: + video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames + video_codec->profile = FF_PROFILE_H264_BASELINE | FF_PROFILE_H264_CONSTRAINED; + av_opt_set(video_codec->priv_data, "preset", "slow", 0); + av_opt_set(video_codec->priv_data, "tune", "zerolatency", 0); + av_opt_set(video_codec->priv_data, "vprofile", "baseline", AV_OPT_SEARCH_CHILDREN); + break; + case AV_CODEC_ID_HEVC: + // tested to work with defaults + break; + case AV_CODEC_ID_VP9: + // tested to work with defaults + break; + default: + ZmqLogger::Instance()->AppendDebugMethod("No codec-specific options defined for this codec. 
HW encoding may fail", + "codec_id", video_codec->codec_id); + break; + } + + // set hw_frames_ctx for encoder's AVCodecContext + int err; + if ((err = set_hwframe_ctx(video_codec, hw_device_ctx, info.width, info.height)) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video (set_hwframe_ctx) ERROR faled to set hwframe context", + "width", info.width, "height", info.height, av_err2str(err), -1); + } + } +#endif // HAVE_HW_ACCEL + /* open the codec */ if (avcodec_open2(video_codec, codec, &opts) < 0) - throw InvalidCodec("Could not open codec", path); + throw InvalidCodec("Could not open video codec", path); AV_COPY_PARAMS_FROM_CONTEXT(st, video_codec); // Free options av_dict_free(&opts); // Add video metadata (if any) - for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) - { + for (std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) { av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video", "video_codec->thread_count", video_codec->thread_count, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video", "video_codec->thread_count", video_codec->thread_count); } // write all queued frames' audio to the video file -void FFmpegWriter::write_audio_packets(bool final) -{ - #pragma omp task firstprivate(final) +void FFmpegWriter::write_audio_packets(bool is_final) { +#pragma omp task firstprivate(is_final) { // Init audio buffers / variables int total_frame_samples = 0; @@ -1149,14 +1498,14 @@ ChannelLayout channel_layout_in_frame = LAYOUT_MONO; // default channel layout // Create a new array (to hold all S16 audio samples, for the current queued frames - int16_t* all_queued_samples = (int16_t*)av_malloc((sizeof(int16_t)*(queued_audio_frames.size() * AVCODEC_MAX_AUDIO_FRAME_SIZE))); - int16_t* all_resampled_samples = NULL; - int16_t* final_samples_planar = NULL; - int16_t* final_samples = NULL; + unsigned int all_queued_samples_size = sizeof(int16_t) * (queued_audio_frames.size() * AVCODEC_MAX_AUDIO_FRAME_SIZE); + int16_t *all_queued_samples = (int16_t *) av_malloc(all_queued_samples_size); + int16_t *all_resampled_samples = NULL; + int16_t *final_samples_planar = NULL; + int16_t *final_samples = NULL; // Loop through each queued audio frame - while (!queued_audio_frames.empty()) - { + while (!queued_audio_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = queued_audio_frames.front(); @@ -1168,7 +1517,7 @@ // Get audio sample array - float* frame_samples_float = NULL; + float *frame_samples_float = NULL; // Get samples interleaved together (c1 c2 c1 c2 c1 c2) frame_samples_float = frame->GetInterleavedAudioSamples(sample_rate_in_frame, NULL, &samples_in_frame); @@ -1197,54 +1546,52 @@ int samples_position = 0; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets", "final", final, "total_frame_samples", total_frame_samples, "channel_layout_in_frame", channel_layout_in_frame, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "LAYOUT_MONO", LAYOUT_MONO); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets", "is_final", is_final, "total_frame_samples", total_frame_samples, "channel_layout_in_frame", channel_layout_in_frame, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "LAYOUT_MONO", LAYOUT_MONO); // Keep track of the original 
sample format AVSampleFormat output_sample_fmt = audio_codec->sample_fmt; AVFrame *audio_frame = NULL; - if (!final) { + if (!is_final) { // Create input frame (and allocate arrays) audio_frame = AV_ALLOCATE_FRAME(); AV_RESET_FRAME(audio_frame); audio_frame->nb_samples = total_frame_samples / channels_in_frame; // Fill input frame with sample data - avcodec_fill_audio_frame(audio_frame, channels_in_frame, AV_SAMPLE_FMT_S16, (uint8_t *) all_queued_samples, - audio_encoder_buffer_size, 0); + int error_code = avcodec_fill_audio_frame(audio_frame, channels_in_frame, AV_SAMPLE_FMT_S16, (uint8_t *) all_queued_samples, all_queued_samples_size, 0); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (std::string) av_err2str(error_code) + "]", "error_code", error_code); + } // Do not convert audio to planar format (yet). We need to keep everything interleaved at this point. - switch (audio_codec->sample_fmt) - { - case AV_SAMPLE_FMT_FLTP: - { + switch (audio_codec->sample_fmt) { + case AV_SAMPLE_FMT_FLTP: { output_sample_fmt = AV_SAMPLE_FMT_FLT; break; } - case AV_SAMPLE_FMT_S32P: - { + case AV_SAMPLE_FMT_S32P: { output_sample_fmt = AV_SAMPLE_FMT_S32; break; } - case AV_SAMPLE_FMT_S16P: - { + case AV_SAMPLE_FMT_S16P: { output_sample_fmt = AV_SAMPLE_FMT_S16; break; } - case AV_SAMPLE_FMT_U8P: - { + case AV_SAMPLE_FMT_U8P: { output_sample_fmt = AV_SAMPLE_FMT_U8; break; } + default: { + // This is only here to silence unused-enum warnings + break; + } } // Update total samples & input frame size (due to bigger or smaller data types) total_frame_samples *= (float(info.sample_rate) / sample_rate_in_frame); // adjust for different byte sizes total_frame_samples *= (float(info.channels) / channels_in_frame); // adjust for different # of channels - // Set remaining samples - remaining_frame_samples = total_frame_samples; - // Create output frame (and allocate arrays) AVFrame *audio_converted = AV_ALLOCATE_FRAME(); AV_RESET_FRAME(audio_converted); @@ -1256,45 +1603,49 @@ // setup resample context if (!avr) { avr = SWR_ALLOC(); - av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); + av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); - av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); - av_opt_set_int(avr, "out_sample_fmt", output_sample_fmt, 0); // planar not allowed here - av_opt_set_int(avr, "in_sample_rate", sample_rate_in_frame, 0); - av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr, "in_channels", channels_in_frame, 0); - av_opt_set_int(avr, "out_channels", info.channels, 0); + av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); + av_opt_set_int(avr, "out_sample_fmt", output_sample_fmt, 0); // planar not allowed here + av_opt_set_int(avr, "in_sample_rate", sample_rate_in_frame, 0); + av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr, "in_channels", channels_in_frame, 0); + av_opt_set_int(avr, "out_channels", info.channels, 0); SWR_INIT(avr); } int nb_samples = 0; // Convert audio samples - nb_samples = SWR_CONVERT(avr, // audio resample context - audio_converted->data, // output data pointers - audio_converted->linesize[0], // output plane size, in bytes. 
(0 if unknown) - audio_converted->nb_samples, // maximum number of samples that the output buffer can hold - audio_frame->data, // input data pointers - audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) - audio_frame->nb_samples); // number of input samples to convert + nb_samples = SWR_CONVERT(avr, // audio resample context + audio_converted->data, // output data pointers + audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) + audio_converted->nb_samples, // maximum number of samples that the output buffer can hold + audio_frame->data, // input data pointers + audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) + audio_frame->nb_samples); // number of input samples to convert + + // Set remaining samples + remaining_frame_samples = nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16); // Create a new array (to hold all resampled S16 audio samples) - all_resampled_samples = (int16_t*)av_malloc(sizeof(int16_t) * nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); + all_resampled_samples = (int16_t *) av_malloc( + sizeof(int16_t) * nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio samples over original samples memcpy(all_resampled_samples, audio_converted->data[0], nb_samples * info.channels * av_get_bytes_per_sample(output_sample_fmt)); // Remove converted audio av_freep(&(audio_frame->data[0])); - AV_FREE_FRAME(&audio_frame); + AV_FREE_FRAME(&audio_frame); av_freep(&audio_converted->data[0]); - AV_FREE_FRAME(&audio_converted); + AV_FREE_FRAME(&audio_converted); all_queued_samples = NULL; // this array cleared with above call - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 1st resampling)", "nb_samples", nb_samples, "remaining_frame_samples", remaining_frame_samples, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 1st resampling)", "nb_samples", nb_samples, "remaining_frame_samples", remaining_frame_samples); } // Loop until no more samples - while (remaining_frame_samples > 0 || final) { + while (remaining_frame_samples > 0 || is_final) { // Get remaining samples needed for this packet int remaining_packet_samples = (audio_input_frame_size * info.channels) - audio_input_position; @@ -1306,9 +1657,10 @@ diff = remaining_frame_samples; // Copy frame samples into the packet samples array - if (!final) + if (!is_final) //TODO: Make this more sane - memcpy(samples + (audio_input_position * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))), all_resampled_samples + samples_position, diff * av_get_bytes_per_sample(output_sample_fmt)); + memcpy(samples + (audio_input_position * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))), + all_resampled_samples + samples_position, diff * av_get_bytes_per_sample(output_sample_fmt)); // Increment counters audio_input_position += diff; @@ -1317,28 +1669,27 @@ remaining_packet_samples -= diff; // Do we have enough samples to proceed? - if (audio_input_position < (audio_input_frame_size * info.channels) && !final) + if (audio_input_position < (audio_input_frame_size * info.channels) && !is_final) // Not enough samples to encode... 
so wait until the next frame break; // Convert to planar (if needed by audio codec) AVFrame *frame_final = AV_ALLOCATE_FRAME(); AV_RESET_FRAME(frame_final); - if (av_sample_fmt_is_planar(audio_codec->sample_fmt)) - { + if (av_sample_fmt_is_planar(audio_codec->sample_fmt)) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (2nd resampling for Planar formats)", "in_sample_fmt", output_sample_fmt, "out_sample_fmt", audio_codec->sample_fmt, "in_sample_rate", info.sample_rate, "out_sample_rate", info.sample_rate, "in_channels", info.channels, "out_channels", info.channels); // setup resample context if (!avr_planar) { avr_planar = SWR_ALLOC(); - av_opt_set_int(avr_planar, "in_channel_layout", info.channel_layout, 0); + av_opt_set_int(avr_planar, "in_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "out_channel_layout", info.channel_layout, 0); - av_opt_set_int(avr_planar, "in_sample_fmt", output_sample_fmt, 0); - av_opt_set_int(avr_planar, "out_sample_fmt", audio_codec->sample_fmt, 0); // planar not allowed here - av_opt_set_int(avr_planar, "in_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr_planar, "out_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr_planar, "in_channels", info.channels, 0); - av_opt_set_int(avr_planar, "out_channels", info.channels, 0); + av_opt_set_int(avr_planar, "in_sample_fmt", output_sample_fmt, 0); + av_opt_set_int(avr_planar, "out_sample_fmt", audio_codec->sample_fmt, 0); // planar not allowed here + av_opt_set_int(avr_planar, "in_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr_planar, "out_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr_planar, "in_channels", info.channels, 0); + av_opt_set_int(avr_planar, "out_channels", info.channels, 0); SWR_INIT(avr_planar); } @@ -1348,42 +1699,44 @@ audio_frame->nb_samples = audio_input_position / info.channels; // Create a new array - final_samples_planar = (int16_t*)av_malloc(sizeof(int16_t) * audio_frame->nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); + final_samples_planar = (int16_t *) av_malloc( + sizeof(int16_t) * audio_frame->nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio into buffer for frame memcpy(final_samples_planar, samples, audio_frame->nb_samples * info.channels * av_get_bytes_per_sample(output_sample_fmt)); // Fill input frame with sample data avcodec_fill_audio_frame(audio_frame, info.channels, output_sample_fmt, (uint8_t *) final_samples_planar, - audio_encoder_buffer_size, 0); + audio_encoder_buffer_size, 0); // Create output frame (and allocate arrays) frame_final->nb_samples = audio_input_frame_size; av_samples_alloc(frame_final->data, frame_final->linesize, info.channels, frame_final->nb_samples, audio_codec->sample_fmt, 0); // Convert audio samples - int nb_samples = SWR_CONVERT(avr_planar, // audio resample context - frame_final->data, // output data pointers - frame_final->linesize[0], // output plane size, in bytes. 
(0 if unknown) - frame_final->nb_samples, // maximum number of samples that the output buffer can hold - audio_frame->data, // input data pointers - audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) - audio_frame->nb_samples); // number of input samples to convert + int nb_samples = SWR_CONVERT(avr_planar, // audio resample context + frame_final->data, // output data pointers + frame_final->linesize[0], // output plane size, in bytes. (0 if unknown) + frame_final->nb_samples, // maximum number of samples that the output buffer can hold + audio_frame->data, // input data pointers + audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) + audio_frame->nb_samples); // number of input samples to convert // Copy audio samples over original samples if (nb_samples > 0) memcpy(samples, frame_final->data[0], nb_samples * av_get_bytes_per_sample(audio_codec->sample_fmt) * info.channels); // deallocate AVFrame - av_freep(&(audio_frame->data[0])); - AV_FREE_FRAME(&audio_frame); - all_queued_samples = NULL; // this array cleared with above call + av_freep(&(audio_frame->data[0])); + AV_FREE_FRAME(&audio_frame); + all_queued_samples = NULL; // this array cleared with above call - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 2nd resampling for Planar formats)", "nb_samples", nb_samples, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 2nd resampling for Planar formats)", "nb_samples", nb_samples); } else { // Create a new array - final_samples = new int16_t[audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))]; + final_samples = (int16_t *) av_malloc( + sizeof(int16_t) * audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio into buffer for frame memcpy(final_samples, samples, audio_input_position * av_get_bytes_per_sample(audio_codec->sample_fmt)); @@ -1393,7 +1746,7 @@ // Fill the final_frame AVFrame with audio (non planar) avcodec_fill_audio_frame(frame_final, audio_codec->channels, audio_codec->sample_fmt, (uint8_t *) final_samples, - audio_encoder_buffer_size, 0); + audio_encoder_buffer_size, 0); } // Increment PTS (in samples) @@ -1412,7 +1765,7 @@ /* encode the audio samples */ int got_packet_ptr = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 // Encode audio (latest version of FFmpeg) int error_code; int ret = 0; @@ -1440,10 +1793,10 @@ ret = -1; } got_packet_ptr = ret; - #else +#else // Encode audio (older versions of FFmpeg) int error_code = avcodec_encode_audio2(audio_codec, &pkt, frame_final, &got_packet_ptr); - #endif +#endif /* if zero size, it means the image was buffered */ if (error_code == 0 && got_packet_ptr) { @@ -1465,27 +1818,25 @@ /* write the compressed frame in the media file */ int error_code = av_interleaved_write_frame(oc, &pkt); - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (std::string) av_err2str(error_code) + "]", "error_code", error_code); } } - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + 
(string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (std::string) av_err2str(error_code) + "]", "error_code", error_code); } // deallocate AVFrame av_freep(&(frame_final->data[0])); - AV_FREE_FRAME(&frame_final); + AV_FREE_FRAME(&frame_final); // deallocate memory for packet AV_FREE_PACKET(&pkt); // Reset position audio_input_position = 0; - final = false; + is_final = false; } // Delete arrays (if needed) @@ -1502,8 +1853,7 @@ } // Allocate an AVFrame object -AVFrame* FFmpegWriter::allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, uint8_t *new_buffer) -{ +AVFrame *FFmpegWriter::allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, uint8_t *new_buffer) { // Create an RGB AVFrame AVFrame *new_av_frame = NULL; @@ -1516,10 +1866,9 @@ *buffer_size = AV_GET_IMAGE_SIZE(pix_fmt, width, height); // Create buffer (if not provided) - if (!new_buffer) - { + if (!new_buffer) { // New Buffer - new_buffer = (uint8_t*)av_malloc(*buffer_size * sizeof(uint8_t)); + new_buffer = (uint8_t *) av_malloc(*buffer_size * sizeof(uint8_t)); // Attach buffer to AVFrame AV_COPY_PICTURE_DATA(new_av_frame, new_buffer, pix_fmt, width, height); new_av_frame->width = width; @@ -1532,8 +1881,7 @@ } // process video frame -void FFmpegWriter::process_video_packet(std::shared_ptr frame) -{ +void FFmpegWriter::process_video_packet(std::shared_ptr frame) { // Determine the height & width of the source image int source_image_width = frame->GetWidth(); int source_image_height = frame->GetHeight(); @@ -1552,7 +1900,7 @@ if (rescaler_position == num_of_rescalers) rescaler_position = 0; - #pragma omp task firstprivate(frame, scaler, source_image_width, source_image_height) +#pragma omp task firstprivate(frame, scaler, source_image_width, source_image_height) { // Allocate an RGB frame & final output frame int bytes_source = 0; @@ -1564,23 +1912,31 @@ pixels = frame->GetPixels(); // Init AVFrame for source image & final (converted image) - frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t*) pixels); - #if IS_FFMPEG_3_2 - AVFrame *frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); - #else + frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t *) pixels); +#if IS_FFMPEG_3_2 + AVFrame *frame_final; + #if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + frame_final = allocate_avframe(AV_PIX_FMT_NV12, info.width, info.height, &bytes_final, NULL); + } else + #endif // HAVE_HW_ACCEL + { + frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); + } +#else AVFrame *frame_final = allocate_avframe(video_codec->pix_fmt, info.width, info.height, &bytes_final, NULL); - #endif +#endif // IS_FFMPEG_3_2 // Fill with data - AV_COPY_PICTURE_DATA(frame_source, (uint8_t*)pixels, PIX_FMT_RGBA, source_image_width, source_image_height); - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::process_video_packet", "frame->number", frame->number, "bytes_source", bytes_source, "bytes_final", bytes_final, "", -1, "", -1, "", -1); + AV_COPY_PICTURE_DATA(frame_source, (uint8_t *) pixels, PIX_FMT_RGBA, source_image_width, source_image_height); + 
ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::process_video_packet", "frame->number", frame->number, "bytes_source", bytes_source, "bytes_final", bytes_final); // Resize & convert pixel format sws_scale(scaler, frame_source->data, frame_source->linesize, 0, - source_image_height, frame_final->data, frame_final->linesize); + source_image_height, frame_final->data, frame_final->linesize); // Add resized AVFrame to av_frames map - #pragma omp critical (av_frames_section) +#pragma omp critical (av_frames_section) add_avframe(frame, frame_final); // Deallocate memory @@ -1591,12 +1947,11 @@ } // write video frame -bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* frame_final) -{ +bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame *frame_final) { #if (LIBAVFORMAT_VERSION_MAJOR >= 58) - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags", oc->oformat->flags); #else - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags & AVFMT_RAWPICTURE", oc->oformat->flags & AVFMT_RAWPICTURE, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags & AVFMT_RAWPICTURE", oc->oformat->flags & AVFMT_RAWPICTURE); if (oc->oformat->flags & AVFMT_RAWPICTURE) { // Raw video case. @@ -1604,19 +1959,18 @@ av_init_packet(&pkt); pkt.flags |= AV_PKT_FLAG_KEY; - pkt.stream_index= video_st->index; - pkt.data= (uint8_t*)frame_final->data; - pkt.size= sizeof(AVPicture); + pkt.stream_index = video_st->index; + pkt.data = (uint8_t *) frame_final->data; + pkt.size = sizeof(AVPicture); // Increment PTS (in frames and scaled to the codec's timebase) - write_video_count += av_rescale_q(1, (AVRational){info.fps.den, info.fps.num}, video_codec->time_base); + write_video_count += av_rescale_q(1, (AVRational) {info.fps.den, info.fps.num}, video_codec->time_base); pkt.pts = write_video_count; /* write the compressed frame in the media file */ int error_code = av_interleaved_write_frame(oc, &pkt); - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (std::string) av_err2str(error_code) + "]", "error_code", error_code); return false; } @@ -1625,7 +1979,7 @@ } else #endif - { + { AVPacket pkt; av_init_packet(&pkt); @@ -1637,34 +1991,63 @@ uint8_t *video_outbuf = NULL; // Increment PTS (in frames and scaled to the codec's timebase) - write_video_count += av_rescale_q(1, (AVRational){info.fps.den, info.fps.num}, video_codec->time_base); + write_video_count += av_rescale_q(1, (AVRational) {info.fps.den, info.fps.num}, video_codec->time_base); // Assign the initial AVFrame PTS from the frame counter frame_final->pts = write_video_count; - +#if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + if (!(hw_frame = av_frame_alloc())) { + fprintf(stderr, "Error code: av_hwframe_alloc\n"); + } + if (av_hwframe_get_buffer(video_codec->hw_frames_ctx, hw_frame, 0) < 0) { + fprintf(stderr, "Error 
code: av_hwframe_get_buffer\n"); + } + if (!hw_frame->hw_frames_ctx) { + fprintf(stderr, "Error hw_frames_ctx.\n"); + } + hw_frame->format = AV_PIX_FMT_NV12; + if ( av_hwframe_transfer_data(hw_frame, frame_final, 0) < 0) { + fprintf(stderr, "Error while transferring frame data to surface.\n"); + } + av_frame_copy_props(hw_frame, frame_final); + } +#endif // HAVE_HW_ACCEL /* encode the image */ int got_packet_ptr = 0; int error_code = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 // Write video packet (latest version of FFmpeg) int frameFinished = 0; - int ret = avcodec_send_frame(video_codec, frame_final); + int ret; + + #if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + ret = avcodec_send_frame(video_codec, hw_frame); //hw_frame!!! + } else + #endif // HAVE_HW_ACCEL + { + ret = avcodec_send_frame(video_codec, frame_final); + } error_code = ret; if (ret < 0 ) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet (Frame not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - if (ret == AVERROR(EAGAIN) ) - cerr << "Frame EAGAIN" << "\n"; - if (ret == AVERROR_EOF ) - cerr << "Frame AVERROR_EOF" << "\n"; + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet (Frame not sent)"); + if (ret == AVERROR(EAGAIN) ) { + std::cerr << "Frame EAGAIN" << "\n"; + } + if (ret == AVERROR_EOF ) { + std::cerr << "Frame AVERROR_EOF" << "\n"; + } avcodec_send_frame(video_codec, NULL); } else { while (ret >= 0) { ret = avcodec_receive_packet(video_codec, &pkt); - if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { avcodec_flush_buffers(video_codec); got_packet_ptr = 0; - break; + break; } if (ret == 0) { got_packet_ptr = 1; @@ -1672,34 +2055,36 @@ } } } - #else - #if LIBAVFORMAT_VERSION_MAJOR >= 54 - // Write video packet (older than FFmpeg 3.2) - error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); - if (error_code != 0 ) - cerr << "Frame AVERROR_EOF" << "\n"; - if (got_packet_ptr == 0 ) - cerr << "Frame gotpacket error" << "\n"; - #else - // Write video packet (even older versions of FFmpeg) - int video_outbuf_size = 200000; - video_outbuf = (uint8_t*) av_malloc(200000); - - /* encode the image */ - int out_size = avcodec_encode_video(video_codec, video_outbuf, video_outbuf_size, frame_final); - - /* if zero size, it means the image was buffered */ - if (out_size > 0) { - if(video_codec->coded_frame->key_frame) - pkt.flags |= AV_PKT_FLAG_KEY; - pkt.data= video_outbuf; - pkt.size= out_size; +#else +#if LIBAVFORMAT_VERSION_MAJOR >= 54 + // Write video packet (older than FFmpeg 3.2) + error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); + if (error_code != 0) { + std::cerr << "Frame AVERROR_EOF" << "\n"; + } + if (got_packet_ptr == 0) { + std::cerr << "Frame gotpacket error" << "\n"; + } +#else + // Write video packet (even older versions of FFmpeg) + int video_outbuf_size = 200000; + video_outbuf = (uint8_t*) av_malloc(200000); - // got data back (so encode this frame) - got_packet_ptr = 1; - } - #endif - #endif + /* encode the image */ + int out_size = avcodec_encode_video(video_codec, video_outbuf, video_outbuf_size, frame_final); + + /* if zero size, it means the image was buffered */ + if (out_size > 0) { + if(video_codec->coded_frame->key_frame) + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.data= video_outbuf; + pkt.size= out_size; + + // got data back (so encode this frame) + got_packet_ptr = 1; + } +#endif // LIBAVFORMAT_VERSION_MAJOR >= 54 
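The IS_FFMPEG_3_2 branches above (both in flush_encoders() and write_video_packet()) use the send/receive encoding API introduced in FFmpeg 3.2. A condensed sketch of that pattern on its own, with illustrative names; the real writer additionally rescales pts/dts to the stream time base before muxing and, on the hardware path, sends hw_frame instead of the software frame:

extern "C" {
#include <libavcodec/avcodec.h>
}

// Push one frame (or NULL to flush) and drain every packet the encoder returns.
static int encode_one(AVCodecContext *enc, AVFrame *frame,
                      int (*write_packet)(AVPacket *pkt)) {
    int ret = avcodec_send_frame(enc, frame);        // frame == NULL starts a flush
    if (ret < 0)
        return ret;

    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;

    while ((ret = avcodec_receive_packet(enc, &pkt)) >= 0) {
        write_packet(&pkt);                          // caller muxes, e.g. av_interleaved_write_frame()
        av_packet_unref(&pkt);
    }
    // EAGAIN: encoder wants more input; EOF: fully flushed. Neither is an error here.
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
}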
+#endif // IS_FFMPEG_3_2 /* if zero size, it means the image was buffered */ if (error_code == 0 && got_packet_ptr) { @@ -1719,9 +2104,8 @@ /* write the compressed frame in the media file */ int error_code = av_interleaved_write_frame(oc, &pkt); - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (std::string) av_err2str(error_code) + "]", "error_code", error_code); return false; } } @@ -1732,6 +2116,14 @@ // Deallocate packet AV_FREE_PACKET(&pkt); +#if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + if (hw_frame) { + av_frame_free(&hw_frame); + hw_frame = NULL; + } + } +#endif // HAVE_HW_ACCEL } // Success @@ -1739,20 +2131,30 @@ } // Output the ffmpeg info about this format, streams, and codecs (i.e. dump format) -void FFmpegWriter::OutputStreamInfo() -{ +void FFmpegWriter::OutputStreamInfo() { // output debug info av_dump_format(oc, 0, path.c_str(), 1); } // Init a collection of software rescalers (thread safe) -void FFmpegWriter::InitScalers(int source_width, int source_height) -{ +void FFmpegWriter::InitScalers(int source_width, int source_height) { + int scale_mode = SWS_FAST_BILINEAR; + if (openshot::Settings::Instance()->HIGH_QUALITY_SCALING) { + scale_mode = SWS_BICUBIC; + } + // Init software rescalers vector (many of them, one for each thread) - for (int x = 0; x < num_of_rescalers; x++) - { + for (int x = 0; x < num_of_rescalers; x++) { // Init the software scaler from FFMpeg - img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_LANCZOS, NULL, NULL, NULL); +#if HAVE_HW_ACCEL + if (hw_en_on && hw_en_supported) { + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_PIX_FMT_NV12, scale_mode, NULL, NULL, NULL); + } else +#endif // HAVE_HW_ACCEL + { + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), scale_mode, + NULL, NULL, NULL); + } // Add rescaler to vector image_rescalers.push_back(img_convert_ctx); @@ -1766,8 +2168,7 @@ } // Remove & deallocate all software scalers -void FFmpegWriter::RemoveScalers() -{ +void FFmpegWriter::RemoveScalers() { // Close all rescalers for (int x = 0; x < num_of_rescalers; x++) sws_freeContext(image_rescalers[x]); diff -Nru libopenshot-0.2.2+dfsg1/src/Fraction.cpp libopenshot-0.2.5+dfsg1/src/Fraction.cpp --- libopenshot-0.2.2+dfsg1/src/Fraction.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Fraction.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Fraction class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/Frame.cpp libopenshot-0.2.5+dfsg1/src/Frame.cpp --- libopenshot-0.2.2+dfsg1/src/Frame.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Frame.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Frame class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -40,10 +43,10 @@ // initialize the audio samples to zero (silence) audio->clear(); -}; +} // Constructor - image only (48kHz audio silence) -Frame::Frame(int64_t number, int width, int height, string color) +Frame::Frame(int64_t number, int width, int height, std::string color) : number(number), pixel_ratio(1,1), channels(2), width(width), height(height), color(color), channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false), max_audio_sample(0) @@ -53,7 +56,7 @@ // initialize the audio samples to zero (silence) audio->clear(); -}; +} // Constructor - audio only (300x200 blank image) Frame::Frame(int64_t number, int samples, int channels) : @@ -66,10 +69,10 @@ // initialize the audio samples to zero (silence) audio->clear(); -}; +} // Constructor - image & audio -Frame::Frame(int64_t number, int width, int height, string color, int samples, int channels) +Frame::Frame(int64_t number, int width, int height, std::string color, int samples, int channels) : number(number), pixel_ratio(1,1), channels(channels), width(width), height(height), color(color), channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false), max_audio_sample(0) @@ -79,7 +82,7 @@ // initialize the audio samples to zero (silence) audio->clear(); -}; +} // Copy constructor @@ -106,11 +109,12 @@ width = other.width; height = other.height; channel_layout = other.channel_layout; - has_audio_data = other.has_image_data; + has_audio_data = other.has_audio_data; has_image_data = other.has_image_data; sample_rate = other.sample_rate; pixel_ratio = Fraction(other.pixel_ratio.num, other.pixel_ratio.den); color = other.color; + max_audio_sample = other.max_audio_sample; if (other.image) image = std::shared_ptr(new QImage(*(other.image))); @@ -120,7 +124,7 @@ wave_image = std::shared_ptr(new QImage(*(other.wave_image))); } -// Descructor +// Destructor Frame::~Frame() { // Clear all pointers image.reset(); @@ -263,7 +267,7 @@ return wave_image; } -// Clear the waveform image (and deallocate it's memory) +// Clear the waveform image (and deallocate its memory) void Frame::ClearWaveform() { if (wave_image) @@ -277,7 +281,7 @@ wave_image = GetWaveform(width, height, Red, Green, Blue, Alpha); // Return array of pixel packets - return wave_image->bits(); + return wave_image->constBits(); } // Display the wave form @@ -337,7 +341,7 @@ float* Frame::GetPlanarAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count) { float *output = NULL; - AudioSampleBuffer *buffer(audio.get()); + juce::AudioSampleBuffer *buffer(audio.get()); int num_of_channels = audio->getNumChannels(); int num_of_samples = GetAudioSamplesCount(); @@ 
-383,7 +387,7 @@ float* Frame::GetInterleavedAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count) { float *output = NULL; - AudioSampleBuffer *buffer(audio.get()); + juce::AudioSampleBuffer *buffer(audio.get()); int num_of_channels = audio->getNumChannels(); int num_of_samples = GetAudioSamplesCount(); @@ -427,7 +431,7 @@ // Get number of audio channels int Frame::GetAudioChannelsCount() { - const GenericScopedLock lock(addingAudioSection); + const GenericScopedLock lock(addingAudioSection); if (audio) return audio->getNumChannels(); else @@ -437,7 +441,7 @@ // Get number of audio samples int Frame::GetAudioSamplesCount() { - const GenericScopedLock lock(addingAudioSection); + const GenericScopedLock lock(addingAudioSection); return max_audio_sample; } @@ -470,14 +474,36 @@ AddColor(width, height, color); // Return array of pixel packets - return image->bits(); + return image->constBits(); } // Get pixel data (for only a single scan-line) const unsigned char* Frame::GetPixels(int row) { // Return array of pixel packets - return image->scanLine(row); + return image->constScanLine(row); +} + +// Check a specific pixel color value (returns True/False) +bool Frame::CheckPixel(int row, int col, int red, int green, int blue, int alpha, int threshold) { + int col_pos = col * 4; // Find column array position + if (!image || row < 0 || row >= (height - 1) || + col_pos < 0 || col_pos >= (width - 1) ) { + // invalid row / col + return false; + } + // Check pixel color + const unsigned char* pixels = GetPixels(row); + if (pixels[col_pos + 0] >= (red - threshold) && pixels[col_pos + 0] <= (red + threshold) && + pixels[col_pos + 1] >= (green - threshold) && pixels[col_pos + 1] <= (green + threshold) && + pixels[col_pos + 2] >= (blue - threshold) && pixels[col_pos + 2] <= (blue + threshold) && + pixels[col_pos + 3] >= (alpha - threshold) && pixels[col_pos + 3] <= (alpha + threshold)) { + // Pixel color matches successfully + return true; + } else { + // Pixel color does not match + return false; + } } // Set Pixel Aspect Ratio @@ -549,13 +575,13 @@ // Save the frame image to the specified path. The image format is determined from the extension (i.e. image.PNG, image.JPEG) -void Frame::Save(string path, float scale, string format, int quality) +void Frame::Save(std::string path, float scale, std::string format, int quality) { // Get preview image std::shared_ptr previewImage = GetImage(); // scale image if needed - if (abs(scale) > 1.001 || abs(scale) < 0.999) + if (fabs(scale) > 1.001 || fabs(scale) < 0.999) { int new_width = width; int new_height = height; @@ -580,8 +606,8 @@ } // Thumbnail the frame image to the specified path. The image format is determined from the extension (i.e. 
image.PNG, image.JPEG) -void Frame::Thumbnail(string path, int new_width, int new_height, string mask_path, string overlay_path, - string background_color, bool ignore_aspect, string format, int quality, float rotate) { +void Frame::Thumbnail(std::string path, int new_width, int new_height, std::string mask_path, std::string overlay_path, + std::string background_color, bool ignore_aspect, std::string format, int quality, float rotate) { // Create blank thumbnail image & fill background color std::shared_ptr thumbnail = std::shared_ptr(new QImage(new_width, new_height, QImage::Format_RGBA8888)); @@ -667,7 +693,7 @@ // Get pixels unsigned char *pixels = (unsigned char *) thumbnail->bits(); - unsigned char *mask_pixels = (unsigned char *) mask->bits(); + const unsigned char *mask_pixels = (const unsigned char *) mask->constBits(); // Convert the mask image to grayscale // Loop through pixels @@ -704,13 +730,13 @@ } // Add (or replace) pixel data to the frame (based on a solid color) -void Frame::AddColor(int new_width, int new_height, string new_color) +void Frame::AddColor(int new_width, int new_height, std::string new_color) { // Set color color = new_color; // Create new image object, and fill with pixel data - const GenericScopedLock lock(addingImageSection); + const GenericScopedLock lock(addingImageSection); #pragma omp critical (AddImage) { image = std::shared_ptr(new QImage(new_width, new_height, QImage::Format_RGBA8888)); @@ -728,7 +754,7 @@ void Frame::AddImage(int new_width, int new_height, int bytes_per_pixel, QImage::Format type, const unsigned char *pixels_) { // Create new buffer - const GenericScopedLock lock(addingImageSection); + const GenericScopedLock lock(addingImageSection); int buffer_size = new_width * new_height * bytes_per_pixel; qbuffer = new unsigned char[buffer_size](); @@ -759,7 +785,7 @@ return; // assign image data - const GenericScopedLock lock(addingImageSection); + const GenericScopedLock lock(addingImageSection); #pragma omp critical (AddImage) { image = new_image; @@ -792,30 +818,38 @@ // Ignore image of different sizes or formats bool ret=false; #pragma omp critical (AddImage) - if (image == new_image || image->size() != image->size() || image->format() != image->format()) - ret=true; - if (ret) + { + if (image == new_image || image->size() != new_image->size()) { + ret = true; + } + else if (new_image->format() != image->format()) { + new_image = std::shared_ptr(new QImage(new_image->convertToFormat(image->format()))); + } + } + if (ret) { return; - + } + // Get the frame's image - const GenericScopedLock lock(addingImageSection); + const GenericScopedLock lock(addingImageSection); #pragma omp critical (AddImage) { - const unsigned char *pixels = image->bits(); - const unsigned char *new_pixels = new_image->bits(); + unsigned char *pixels = image->bits(); + const unsigned char *new_pixels = new_image->constBits(); // Loop through the scanlines of the image (even or odd) int start = 0; if (only_odd_lines) start = 1; - for (int row = start; row < image->height(); row += 2) { - memcpy((unsigned char *) pixels, new_pixels + (row * image->bytesPerLine()), image->bytesPerLine()); - new_pixels += image->bytesPerLine(); - } + + for (int row = start; row < image->height(); row += 2) { + int offset = row * image->bytesPerLine(); + memcpy(pixels + offset, new_pixels + offset, image->bytesPerLine()); + } // Update height and width - width = image->width(); height = image->height(); + width = image->width(); has_image_data = true; } } @@ -825,7 +859,7 @@ // Resize 
audio container to hold more (or less) samples and channels void Frame::ResizeAudio(int channels, int length, int rate, ChannelLayout layout) { - const GenericScopedLock lock(addingAudioSection); + const GenericScopedLock lock(addingAudioSection); // Resize JUCE audio buffer audio->setSize(channels, length, true, true, false); @@ -838,7 +872,7 @@ // Add audio samples to a specific channel void Frame::AddAudio(bool replaceSamples, int destChannel, int destStartSample, const float* source, int numSamples, float gainToApplyToSource = 1.0f) { - const GenericScopedLock lock(addingAudioSection); + const GenericScopedLock lock(addingAudioSection); #pragma omp critical (adding_audio) { // Clamp starting sample to 0 @@ -869,7 +903,7 @@ // Apply gain ramp (i.e. fading volume) void Frame::ApplyGainRamp(int destChannel, int destStartSample, int numSamples, float initial_gain = 0.0f, float final_gain = 1.0f) { - const GenericScopedLock lock(addingAudioSection); + const GenericScopedLock lock(addingAudioSection); // Apply gain ramp audio->applyGainRamp(destChannel, destStartSample, numSamples, initial_gain, final_gain); @@ -896,7 +930,7 @@ AddColor(width, height, "#000000"); // Get the pixels from the frame image - QRgb const *tmpBits = (const QRgb*)image->bits(); + const QRgb *tmpBits = (const QRgb*)image->constBits(); // Create new image object, and fill with pixel data std::shared_ptr magick_image = std::shared_ptr(new Magick::Image(image->width(), image->height(),"RGBA", Magick::CharPixel, tmpBits)); @@ -904,7 +938,7 @@ // Give image a transparent background color magick_image->backgroundColor(Magick::Color("none")); magick_image->virtualPixelMethod(Magick::TransparentVirtualPixelMethod); - magick_image->matte(true); + MAGICK_IMAGE_ALPHA(magick_image, true); return magick_image; } @@ -923,20 +957,12 @@ qbuffer = new unsigned char[bufferSize](); unsigned char *buffer = (unsigned char*)qbuffer; - // Iterate through the pixel packets, and load our own buffer - // Each color needs to be scaled to 8 bit (using the ImageMagick built-in ScaleQuantumToChar function) - int numcopied = 0; - Magick::PixelPacket *pixels = new_image->getPixels(0,0, new_image->columns(), new_image->rows()); - for (int n = 0, i = 0; n < new_image->columns() * new_image->rows(); n += 1, i += 4) { - buffer[i+0] = MagickCore::ScaleQuantumToChar((Magick::Quantum) pixels[n].red); - buffer[i+1] = MagickCore::ScaleQuantumToChar((Magick::Quantum) pixels[n].green); - buffer[i+2] = MagickCore::ScaleQuantumToChar((Magick::Quantum) pixels[n].blue); - buffer[i+3] = 255 - MagickCore::ScaleQuantumToChar((Magick::Quantum) pixels[n].opacity); - numcopied+=4; - } + MagickCore::ExceptionInfo exception; + // TODO: Actually do something, if we get an exception here + MagickCore::ExportImagePixels(new_image->constImage(), 0, 0, new_image->columns(), new_image->rows(), "RGBA", Magick::CharPixel, buffer, &exception); - // Create QImage of frame data - image = std::shared_ptr(new QImage(qbuffer, width, height, width * BPP, QImage::Format_RGBA8888, (QImageCleanupFunction) &cleanUpBuffer, (void*) qbuffer)); + // Create QImage of frame data + image = std::shared_ptr(new QImage(qbuffer, width, height, width * BPP, QImage::Format_RGBA8888, (QImageCleanupFunction) &cleanUpBuffer, (void*) qbuffer)); // Update height and width width = image->width(); @@ -952,21 +978,25 @@ if (!GetAudioSamplesCount()) return; - AudioDeviceManager deviceManager; - deviceManager.initialise (0, /* number of input channels */ + juce::AudioDeviceManager deviceManager; + String error = 
deviceManager.initialise (0, /* number of input channels */ 2, /* number of output channels */ 0, /* no XML settings.. */ true /* select default device on failure */); - //deviceManager.playTestSound(); - AudioSourcePlayer audioSourcePlayer; + // Output error (if any) + if (error.isNotEmpty()) { + cout << "Error on initialise(): " << error.toStdString() << endl; + } + + juce::AudioSourcePlayer audioSourcePlayer; deviceManager.addAudioCallback (&audioSourcePlayer); ScopedPointer my_source; my_source = new AudioBufferSource(audio.get()); // Create TimeSliceThread for audio buffering - TimeSliceThread my_thread("Audio buffer thread"); + juce::TimeSliceThread my_thread("Audio buffer thread"); // Start thread my_thread.startThread(); @@ -982,7 +1012,7 @@ // Create MIXER - MixerAudioSource mixer; + juce::MixerAudioSource mixer; mixer.addInputSource(&transport1, false); audioSourcePlayer.setSource (&mixer); @@ -1025,7 +1055,7 @@ // Add audio silence void Frame::AddAudioSilence(int numSamples) { - const GenericScopedLock lock(addingAudioSection); + const GenericScopedLock lock(addingAudioSection); // Resize audio container audio->setSize(channels, numSamples, false, true, false); diff -Nru libopenshot-0.2.2+dfsg1/src/FrameMapper.cpp libopenshot-0.2.5+dfsg1/src/FrameMapper.cpp --- libopenshot-0.2.2+dfsg1/src/FrameMapper.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/FrameMapper.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for the FrameMapper class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -54,9 +57,6 @@ // Adjust cache size based on size of frame and audio final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); - - // init mapping between original and target frames - Init(); } // Destructor @@ -64,6 +64,8 @@ if (is_open) // Auto Close if not already Close(); + + reader = NULL; } /// Get the current reader @@ -73,7 +75,7 @@ return reader; else // Throw error if reader not initialized - throw ReaderClosed("No Reader has been initialized for FrameMapper. Call Reader(*reader) before calling this method.", ""); + throw ReaderClosed("No Reader has been initialized for FrameMapper. Call Reader(*reader) before calling this method."); } void FrameMapper::AddField(int64_t frame) @@ -97,7 +99,7 @@ // whether the frame rate is increasing or decreasing. 
void FrameMapper::Init() { - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Init (Calculate frame mappings)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Init (Calculate frame mappings)"); // Do not initialize anything if just a picture with no audio if (info.has_video and !info.has_audio and info.has_single_image) @@ -205,22 +207,23 @@ } } else { - // Map the remaining framerates using a simple Keyframe curve - // Calculate the difference (to be used as a multiplier) + // Map the remaining framerates using a linear algorithm double rate_diff = target.ToDouble() / original.ToDouble(); int64_t new_length = reader->info.video_length * rate_diff; - // Build curve for framerate mapping - Keyframe rate_curve; - rate_curve.AddPoint(1, 1, LINEAR); - rate_curve.AddPoint(new_length, reader->info.video_length, LINEAR); + // Calculate the value difference + double value_increment = (reader->info.video_length + 1) / (double) (new_length); // Loop through curve, and build list of frames + double original_frame_num = 1.0f; for (int64_t frame_num = 1; frame_num <= new_length; frame_num++) { // Add 2 fields per frame - AddField(rate_curve.GetInt(frame_num)); - AddField(rate_curve.GetInt(frame_num)); + AddField(round(original_frame_num)); + AddField(round(original_frame_num)); + + // Increment original frame number + original_frame_num += value_increment; } } @@ -232,7 +235,7 @@ int64_t start_samples_frame = 1; int start_samples_position = 0; - for (int64_t field = 1; field <= fields.size(); field++) + for (std::vector::size_type field = 1; field <= fields.size(); field++) { // Get the current field Field f = fields[field - 1]; @@ -310,6 +313,11 @@ MappedFrame FrameMapper::GetMappedFrame(int64_t TargetFrameNumber) { + // Check if mappings are dirty (and need to be recalculated) + if (is_dirty) + // Recalculate mappings + Init(); + // Ignore mapping on single image readers if (info.has_video and !info.has_audio and info.has_single_image) { // Return the same number @@ -329,12 +337,12 @@ // frame too small, return error throw OutOfBoundsFrame("An invalid frame was requested.", TargetFrameNumber, frames.size()); - else if (TargetFrameNumber > frames.size()) + else if (TargetFrameNumber > (int64_t)frames.size()) // frame too large, set to end frame TargetFrameNumber = frames.size(); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetMappedFrame", "TargetFrameNumber", TargetFrameNumber, "frames.size()", frames.size(), "frames[...].Odd", frames[TargetFrameNumber - 1].Odd.Frame, "frames[...].Even", frames[TargetFrameNumber - 1].Even.Frame, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetMappedFrame", "TargetFrameNumber", TargetFrameNumber, "frames.size()", frames.size(), "frames[...].Odd", frames[TargetFrameNumber - 1].Odd.Frame, "frames[...].Even", frames[TargetFrameNumber - 1].Even.Frame); // Return frame return frames[TargetFrameNumber - 1]; @@ -350,10 +358,7 @@ try { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - - // Set max image size (used for performance optimization) - reader->SetMaxSize(max_width, max_height); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame); // Attempt to get a frame (but this could fail if a reader has just been closed) 
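The rewritten FrameMapper::Init() above drops the temporary Keyframe curve and maps frame rates with plain linear arithmetic: it walks the target timeline and advances a floating-point source position by (video_length + 1) / new_length per target frame. A standalone sketch of that calculation, using an assumed 24 fps source remapped to 30 fps over 240 frames (so roughly every fourth source frame is repeated):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const int64_t video_length = 240;                 // source frame count (assumed)
        const double original_fps = 24.0, target_fps = 30.0;

        const double rate_diff = target_fps / original_fps;        // 1.25
        const int64_t new_length = video_length * rate_diff;       // 300 target frames
        const double value_increment = (video_length + 1) / (double) new_length;

        double original_frame_num = 1.0;
        for (int64_t frame_num = 1; frame_num <= new_length; frame_num++) {
            // FrameMapper::Init() adds two fields per target frame at this source frame
            printf("target %lld -> source %lld\n",
                   (long long) frame_num, (long long) std::llround(original_frame_num));
            original_frame_num += value_increment;
        }
        return 0;
    }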
new_frame = reader->GetFrame(number); @@ -370,7 +375,7 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame); // Create blank frame new_frame = std::make_shared(number, info.width, info.height, "#000000", samples_in_frame, reader->info.channels); @@ -404,14 +409,14 @@ int minimum_frames = 1; // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetFrame (Loop through frames)", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetFrame (Loop through frames)", "requested_frame", requested_frame, "minimum_frames", minimum_frames); // Loop through all requested frames for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetFrame (inside omp for loop)", "frame_number", frame_number, "minimum_frames", minimum_frames, "requested_frame", requested_frame, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetFrame (inside omp for loop)", "frame_number", frame_number, "minimum_frames", minimum_frames, "requested_frame", requested_frame); // Get the mapped frame MappedFrame mapped = GetMappedFrame(frame_number); @@ -629,7 +634,7 @@ { if (reader) { - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Open", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Open"); // Open the reader reader->Open(); @@ -644,11 +649,21 @@ // Create a scoped lock, allowing only a single thread to run the following code at one time const GenericScopedLock lock(getFrameCriticalSection); - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Close"); // Close internal reader reader->Close(); + // Clear the fields & frames lists + fields.clear(); + frames.clear(); + + // Mark as dirty + is_dirty = true; + + // Clear cache + final_cache.Clear(); + // Deallocate resample buffer if (avr) { SWR_CLOSE(avr); @@ -660,14 +675,14 @@ // Generate JSON string of this object -string FrameMapper::Json() { +std::string FrameMapper::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value FrameMapper::JsonValue() { +// Generate Json::Value for this object +Json::Value FrameMapper::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties @@ -678,30 +693,24 @@ } // Load JSON string into this object -void FrameMapper::SetJson(string value) { +void FrameMapper::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw 
InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void FrameMapper::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void FrameMapper::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); @@ -746,14 +755,16 @@ SWR_FREE(&avr); avr = NULL; } - - // Re-init mapping - Init(); } // Resample audio and map channels (if needed) void FrameMapper::ResampleMappedAudio(std::shared_ptr frame, int64_t original_frame_number) { + // Check if mappings are dirty (and need to be recalculated) + if (is_dirty) + // Recalculate mappings + Init(); + // Init audio buffers / variables int total_frame_samples = 0; int channels_in_frame = frame->GetAudioChannelsCount(); @@ -761,7 +772,7 @@ int samples_in_frame = frame->GetAudioSamplesCount(); ChannelLayout channel_layout_in_frame = frame->ChannelsLayout(); - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio", "frame->number", frame->number, "original_frame_number", original_frame_number, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "sample_rate_in_frame", sample_rate_in_frame, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio", "frame->number", frame->number, "original_frame_number", original_frame_number, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "sample_rate_in_frame", sample_rate_in_frame); // Get audio sample array float* frame_samples_float = NULL; @@ -797,7 +808,7 @@ if (error_code < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio ERROR [" + (std::string)av_err2str(error_code) + "]", "error_code", error_code); throw ErrorEncodingVideo("Error while resampling audio in frame mapper", frame->number); } @@ -896,7 +907,7 @@ // Add samples to frame for this channel frame->AddAudio(true, channel_filter, 0, channel_buffer, position, 1.0f); - ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (Add audio to channel)", "number of samples", position, "channel_filter", channel_filter, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (Add audio to channel)", "number of samples", position, "channel_filter", channel_filter); } // Update frame's audio meta data diff -Nru libopenshot-0.2.2+dfsg1/src/ImageReader.cpp libopenshot-0.2.5+dfsg1/src/ImageReader.cpp --- libopenshot-0.2.2+dfsg1/src/ImageReader.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/ImageReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for ImageReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -25,20 +28,23 @@ * along with OpenShot Library. If not, see . 
*/ +// Require ImageMagick support +#ifdef USE_IMAGEMAGICK + #include "../include/ImageReader.h" using namespace openshot; -ImageReader::ImageReader(string path) : path(path), is_open(false) +ImageReader::ImageReader(std::string path) : path(path), is_open(false) { - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) Open(); Close(); } -ImageReader::ImageReader(string path, bool inspect_reader) : path(path), is_open(false) +ImageReader::ImageReader(std::string path, bool inspect_reader) : path(path), is_open(false) { - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) if (inspect_reader) { Open(); Close(); @@ -59,9 +65,9 @@ // Give image a transparent background color image->backgroundColor(Magick::Color("none")); - image->matte(true); + MAGICK_IMAGE_ALPHA(image, true); } - catch (Magick::Exception e) { + catch (const Magick::Exception& e) { // raise exception throw InvalidFile("File could not be opened.", path); } @@ -76,7 +82,7 @@ info.height = image->size().height(); info.pixel_ratio.num = 1; info.pixel_ratio.den = 1; - info.duration = 60 * 60 * 24; // 24 hour duration + info.duration = 60 * 60 * 1; // 1 hour duration info.fps.num = 30; info.fps.den = 1; info.video_timebase.num = 1; @@ -106,7 +112,7 @@ { // Mark as "closed" is_open = false; - + // Delete the image image.reset(); } @@ -130,14 +136,14 @@ } // Generate JSON string of this object -string ImageReader::Json() { +std::string ImageReader::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value ImageReader::JsonValue() { +// Generate Json::Value for this object +Json::Value ImageReader::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties @@ -149,30 +155,24 @@ } // Load JSON string into this object -void ImageReader::SetJson(string value) { +void ImageReader::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void ImageReader::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void ImageReader::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); @@ -188,3 +188,5 @@ Open(); } } + +#endif //USE_IMAGEMAGICK diff -Nru libopenshot-0.2.2+dfsg1/src/ImageWriter.cpp libopenshot-0.2.5+dfsg1/src/ImageWriter.cpp --- libopenshot-0.2.2+dfsg1/src/ImageWriter.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/ImageWriter.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for ImageWriter class * @author Jonathan Thomas , Fabrice Bellard * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 
2008-2013 OpenShot Studios, LLC, Fabrice Bellard + * Copyright (c) 2008-2019 OpenShot Studios, LLC, Fabrice Bellard * (http://www.openshotstudios.com). This file is part of * OpenShot Library (http://www.openshot.org), an open-source project * dedicated to delivering high quality video editing and animation solutions @@ -28,11 +31,14 @@ * along with OpenShot Library. If not, see . */ +//Require ImageMagick support +#ifdef USE_IMAGEMAGICK + #include "../include/ImageWriter.h" using namespace openshot; -ImageWriter::ImageWriter(string path) : +ImageWriter::ImageWriter(std::string path) : path(path), cache_size(8), is_writing(false), write_video_count(0), image_quality(75), number_of_loops(1), combine_frames(true), is_open(false) { @@ -42,7 +48,7 @@ } // Set video export options -void ImageWriter::SetVideoOptions(string format, Fraction fps, int width, int height, +void ImageWriter::SetVideoOptions(std::string format, Fraction fps, int width, int height, int quality, int loops, bool combine) { // Set frames per second (if provided) @@ -97,7 +103,7 @@ std::shared_ptr frame_image = frame->GetMagickImage(); frame_image->magick( info.vcodec ); frame_image->backgroundColor(Magick::Color("none")); - frame_image->matte(true); + MAGICK_IMAGE_ALPHA(frame_image, true); frame_image->quality(image_quality); frame_image->animationDelay(info.video_timebase.ToFloat() * 100); frame_image->animationIterations(number_of_loops); @@ -122,7 +128,7 @@ // Write a block of frames from a reader void ImageWriter::WriteFrame(ReaderBase* reader, int64_t start, int64_t length) { - ZmqLogger::Instance()->AppendDebugMethod("ImageWriter::WriteFrame (from Reader)", "start", start, "length", length, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("ImageWriter::WriteFrame (from Reader)", "start", start, "length", length); // Loop through each frame (and encoded it) for (int64_t number = start; number <= length; number++) @@ -150,6 +156,7 @@ // Close writer is_open = false; - ZmqLogger::Instance()->AppendDebugMethod("ImageWriter::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("ImageWriter::Close"); } +#endif //USE_IMAGEMAGICK diff -Nru libopenshot-0.2.2+dfsg1/src/Json.cpp libopenshot-0.2.5+dfsg1/src/Json.cpp --- libopenshot-0.2.2+dfsg1/src/Json.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Json.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,50 @@ +/** + * @file + * @brief Helper functions for Json parsing + * @author FeRD (Frank Dana) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include "../include/Json.h" + +const Json::Value openshot::stringToJson(const std::string value) { + + // Parse JSON string into JSON objects + Json::Value root; + Json::CharReaderBuilder rbuilder; + Json::CharReader* reader(rbuilder.newCharReader()); + + std::string errors; + bool success = reader->parse( value.c_str(), value.c_str() + value.size(), + &root, &errors ); + delete reader; + + if (!success) + // Raise exception + throw openshot::InvalidJSON("JSON could not be parsed (or is invalid)"); + + return root; +} diff -Nru libopenshot-0.2.2+dfsg1/src/KeyFrame.cpp libopenshot-0.2.5+dfsg1/src/KeyFrame.cpp --- libopenshot-0.2.2+dfsg1/src/KeyFrame.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/KeyFrame.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for the Keyframe class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,64 +29,121 @@ */ #include "../include/KeyFrame.h" +#include +#include +#include using namespace std; using namespace openshot; -// Because points can be added in any order, we need to reorder them -// in ascending order based on the point.co.X value. This simplifies -// processing the curve, due to all the points going from left to right. -void Keyframe::ReorderPoints() { - // Loop through all coordinates, and sort them by the X attribute - for (int64_t x = 0; x < Points.size(); x++) { - int64_t compare_index = x; - int64_t smallest_index = x; - - for (int64_t compare_index = x + 1; compare_index < Points.size(); compare_index++) { - if (Points[compare_index].co.X < Points[smallest_index].co.X) { - smallest_index = compare_index; +namespace { + bool IsPointBeforeX(Point const & p, double const x) { + return p.co.X < x; + } + + double InterpolateLinearCurve(Point const & left, Point const & right, double const target) { + double const diff_Y = right.co.Y - left.co.Y; + double const diff_X = right.co.X - left.co.X; + double const slope = diff_Y / diff_X; + return left.co.Y + slope * (target - left.co.X); + } + + double InterpolateBezierCurve(Point const & left, Point const & right, double const target, double const allowed_error) { + double const X_diff = right.co.X - left.co.X; + double const Y_diff = right.co.Y - left.co.Y; + Coordinate const p0 = left.co; + Coordinate const p1 = Coordinate(p0.X + left.handle_right.X * X_diff, p0.Y + left.handle_right.Y * Y_diff); + Coordinate const p2 = Coordinate(p0.X + right.handle_left.X * X_diff, p0.Y + right.handle_left.Y * Y_diff); + Coordinate const p3 = right.co; + + double t = 0.5; + double t_step = 0.25; + do { + // Bernstein polynoms + double B[4] = {1, 3, 3, 1}; + double oneMinTExp = 1; + double tExp = 1; + for (int i = 0; i < 4; ++i, tExp *= t) { + B[i] *= tExp; + } + for (int i = 0; i < 4; ++i, oneMinTExp *= 1 - t) { + B[4 - i - 1] *= oneMinTExp; + } + double const x = p0.X * B[0] + p1.X * B[1] + p2.X * B[2] + p3.X * B[3]; + double const y = p0.Y * B[0] + p1.Y * B[1] + p2.Y * B[2] + p3.Y * B[3]; + if (fabs(target - x) < allowed_error) { + return y; + } + if (x > target) { + t -= t_step; + } + else { + t += t_step; } + t_step /= 2; + } while (true); + } + + + double InterpolateBetween(Point const & left, Point const & right, double target, double 
allowed_error) { + assert(left.co.X < target); + assert(target <= right.co.X); + switch (right.interpolation) { + case CONSTANT: return left.co.Y; + case LINEAR: return InterpolateLinearCurve(left, right, target); + case BEZIER: return InterpolateBezierCurve(left, right, target, allowed_error); } + } + - // swap items - if (smallest_index != compare_index) { - swap(Points[compare_index], Points[smallest_index]); + template + int64_t SearchBetweenPoints(Point const & left, Point const & right, int64_t const current, Check check) { + int64_t start = left.co.X; + int64_t stop = right.co.X; + while (start < stop) { + int64_t const mid = (start + stop + 1) / 2; + double const value = InterpolateBetween(left, right, mid, 0.01); + if (check(round(value), current)) { + start = mid; + } else { + stop = mid - 1; + } } + return start; } } -// Constructor which sets the default point & coordinate at X=0 -Keyframe::Keyframe(double value) : needs_update(true) { - // Init the factorial table, needed by bezier curves - CreateFactorialTable(); +// Constructor which sets the default point & coordinate at X=1 +Keyframe::Keyframe(double value) { // Add initial point AddPoint(Point(value)); } -// Keyframe constructor -Keyframe::Keyframe() : needs_update(true) { - // Init the factorial table, needed by bezier curves - CreateFactorialTable(); -} - // Add a new point on the key-frame. Each point has a primary coordinate, // a left handle, and a right handle. void Keyframe::AddPoint(Point p) { - // mark as dirty - needs_update = true; - - // Check for duplicate point (and remove it) - Point closest = GetClosestPoint(p); - if (closest.co.X == p.co.X) - // Remove existing point - RemovePoint(closest); - - // Add point at correct spot - Points.push_back(p); - - // Sort / Re-order points based on X coordinate - ReorderPoints(); + // candidate is not less (greater or equal) than the new point in + // the X coordinate. + std::vector::iterator candidate = + std::lower_bound(begin(Points), end(Points), p.co.X, IsPointBeforeX); + if (candidate == end(Points)) { + // New point X is greater than all other points' X, add to + // back. + Points.push_back(p); + } else if ((*candidate).co.X == p.co.X) { + // New point is at same X coordinate as some point, overwrite + // point. + *candidate = p; + } else { + // New point needs to be inserted before candidate; thus move + // candidate and all following one to the right and insert new + // point then where candidate was. + size_t const candidate_index = candidate - begin(Points); + Points.push_back(p); // Make space; could also be a dummy point. INVALIDATES candidate! 
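The interpolation helpers introduced just above evaluate a keyframe segment on demand instead of filling the old Values vector. For a LINEAR segment the math is plain point-slope form: with a left point at (1, 10) and a right point at (5, 20), a request at X = 3 gives 10 + (20 - 10) / (5 - 1) * (3 - 1) = 15. A minimal standalone version, using a plain struct in place of openshot::Coordinate:

    #include <cstdio>

    struct Pt { double X, Y; };   // stand-in for openshot::Coordinate

    // Point-slope interpolation between two keyframe points, matching the
    // shape of InterpolateLinearCurve() above.
    static double linear_between(Pt left, Pt right, double target) {
        const double slope = (right.Y - left.Y) / (right.X - left.X);
        return left.Y + slope * (target - left.X);
    }

    int main() {
        printf("%g\n", linear_between({1, 10}, {5, 20}, 3));   // prints 15
        return 0;
    }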
+ std::move_backward(begin(Points) + candidate_index, end(Points) - 1, end(Points)); + Points[candidate_index] = p; + } } // Add a new point on the key-frame, with some defaults set (BEZIER) @@ -107,9 +167,9 @@ } // Get the index of a point by matching a coordinate -int64_t Keyframe::FindIndex(Point p) { +int64_t Keyframe::FindIndex(Point p) const { // loop through points, and find a matching coordinate - for (int64_t x = 0; x < Points.size(); x++) { + for (std::vector::size_type x = 0; x < Points.size(); x++) { // Get each point Point existing_point = Points[x]; @@ -125,67 +185,49 @@ } // Determine if point already exists -bool Keyframe::Contains(Point p) { - // loop through points, and find a matching coordinate - for (int64_t x = 0; x < Points.size(); x++) { - // Get each point - Point existing_point = Points[x]; - - // find a match - if (p.co.X == existing_point.co.X) { - // Remove the matching point, and break out of loop - return true; - } - } - - // no matching point found - return false; +bool Keyframe::Contains(Point p) const { + std::vector::const_iterator i = + std::lower_bound(begin(Points), end(Points), p.co.X, IsPointBeforeX); + return i != end(Points) && i->co.X == p.co.X; } // Get current point (or closest point) from the X coordinate (i.e. the frame number) -Point Keyframe::GetClosestPoint(Point p, bool useLeft) { - Point closest(-1, -1); - - // loop through points, and find a matching coordinate - for (int64_t x = 0; x < Points.size(); x++) { - // Get each point - Point existing_point = Points[x]; - - // find a match - if (existing_point.co.X >= p.co.X && !useLeft) { - // New closest point found (to the Right) - closest = existing_point; - break; - } else if (existing_point.co.X < p.co.X && useLeft) { - // New closest point found (to the Left) - closest = existing_point; - } else if (existing_point.co.X >= p.co.X && useLeft) { - // We've gone past the left point... so break - break; - } +Point Keyframe::GetClosestPoint(Point p, bool useLeft) const { + if (Points.size() == 0) { + return Point(-1, -1); } - // Handle edge cases (if no point was found) - if (closest.co.X == -1) { - if (p.co.X <= 1 && Points.size() > 0) - // Assign 1st point - closest = Points[0]; - else if (Points.size() > 0) - // Assign last point - closest = Points[Points.size() - 1]; + // Finds a point with an X coordinate which is "not less" (greater + // or equal) than the queried X coordinate. + std::vector::const_iterator candidate = + std::lower_bound(begin(Points), end(Points), p.co.X, IsPointBeforeX); + + if (candidate == end(Points)) { + // All points are before the queried point. + // + // Note: Behavior the same regardless of useLeft! + return Points.back(); + } + if (candidate == begin(Points)) { + // First point is greater or equal to the queried point. + // + // Note: Behavior the same regardless of useLeft! + return Points.front(); + } + if (useLeft) { + return *(candidate - 1); + } else { + return *candidate; } - - // no matching point found - return closest; } // Get current point (or closest point to the right) from the X coordinate (i.e. 
the frame number) -Point Keyframe::GetClosestPoint(Point p) { +Point Keyframe::GetClosestPoint(Point p) const { return GetClosestPoint(p, false); } // Get previous point (if any) -Point Keyframe::GetPreviousPoint(Point p) { +Point Keyframe::GetPreviousPoint(Point p) const { // Lookup the index of this point try { @@ -197,24 +239,18 @@ else return Points[0]; - } catch (OutOfBoundsPoint) { + } catch (const OutOfBoundsPoint& e) { // No previous point return Point(-1, -1); } } // Get max point (by Y coordinate) -Point Keyframe::GetMaxPoint() { +Point Keyframe::GetMaxPoint() const { Point maxPoint(-1, -1); - // loop through points, and find the largest Y value - for (int64_t x = 0; x < Points.size(); x++) { - // Get each point - Point existing_point = Points[x]; - - // Is point larger than max point + for (Point const & existing_point: Points) { if (existing_point.co.Y >= maxPoint.co.Y) { - // New max point found maxPoint = existing_point; } } @@ -223,111 +259,81 @@ } // Get the value at a specific index -double Keyframe::GetValue(int64_t index) -{ - // Check if it needs to be processed - if (needs_update) - Process(); +double Keyframe::GetValue(int64_t index) const { + if (Points.empty()) { + return 0; + } + std::vector::const_iterator candidate = + std::lower_bound(begin(Points), end(Points), static_cast(index), IsPointBeforeX); - // Is index a valid point? - if (index >= 0 && index < Values.size()) - // Return value - return Values[index].Y; - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return Values[0].Y; - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return Values[Values.size() - 1].Y; - else - // return a blank coordinate (0,0) - return 0.0; + if (candidate == end(Points)) { + // index is behind last point + return Points.back().co.Y; + } + if (candidate == begin(Points)) { + // index is at or before first point + return Points.front().co.Y; + } + if (candidate->co.X == index) { + // index is directly on a point + return candidate->co.Y; + } + std::vector::const_iterator predecessor = candidate - 1; + return InterpolateBetween(*predecessor, *candidate, index, 0.01); } // Get the rounded INT value at a specific index -int Keyframe::GetInt(int64_t index) -{ - // Check if it needs to be processed - if (needs_update) - Process(); - - // Is index a valid point? - if (index >= 0 && index < Values.size()) - // Return value - return int(round(Values[index].Y)); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return int(round(Values[0].Y)); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return int(round(Values[Values.size() - 1].Y)); - else - // return a blank coordinate (0,0) - return 0; +int Keyframe::GetInt(int64_t index) const { + return int(round(GetValue(index))); } // Get the rounded INT value at a specific index -int64_t Keyframe::GetLong(int64_t index) -{ - // Check if it needs to be processed - if (needs_update) - Process(); - - // Is index a valid point? 
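With the accessors in this hunk rewritten to search the sorted Points directly, a Keyframe can be queried without the old Process() pre-computation. A small usage sketch (header path depends on how the libopenshot headers are installed; the expected values follow from GetValue() above and from GetLength()/GetCount() a little further on):

    #include "KeyFrame.h"   // adjust to the local libopenshot include path
    #include <iostream>

    int main() {
        openshot::Keyframe kf(1.0);          // single point at (1, 1.0)

        openshot::Point p(10, 10.0);         // second point at (10, 10.0)
        p.interpolation = openshot::LINEAR;  // segment type is taken from the right point
        kf.AddPoint(p);

        std::cout << kf.GetCount()   << "\n";   // 2   (points)
        std::cout << kf.GetLength()  << "\n";   // 11  (last X + 1)
        std::cout << kf.GetValue(0)  << "\n";   // 1   (clamped before the first point)
        std::cout << kf.GetValue(5)  << "\n";   // 5   (linear interpolation)
        std::cout << kf.GetValue(99) << "\n";   // 10  (clamped after the last point)
        return 0;
    }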
- if (index >= 0 && index < Values.size()) - // Return value - return long(round(Values[index].Y)); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return long(round(Values[0].Y)); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return long(round(Values[Values.size() - 1].Y)); - else - // return a blank coordinate (0,0) - return 0; +int64_t Keyframe::GetLong(int64_t index) const { + return long(round(GetValue(index))); } // Get the direction of the curve at a specific index (increasing or decreasing) -bool Keyframe::IsIncreasing(int index) +bool Keyframe::IsIncreasing(int index) const { - // Check if it needs to be processed - if (needs_update) - Process(); - - // Is index a valid point? - if (index >= 0 && index < Values.size()) - // Return value - return long(round(Values[index].IsIncreasing())); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return long(round(Values[0].IsIncreasing())); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return long(round(Values[Values.size() - 1].IsIncreasing())); - else - // return the default direction of most curves (i.e. increasing is true) + if (index < 1 || (index + 1) >= GetLength()) { return true; + } + std::vector::const_iterator candidate = + std::lower_bound(begin(Points), end(Points), static_cast(index), IsPointBeforeX); + if (candidate == end(Points)) { + return false; // After the last point, thus constant. + } + if ((candidate->co.X == index) || (candidate == begin(Points))) { + ++candidate; + } + int64_t const value = GetLong(index); + do { + if (value < round(candidate->co.Y)) { + return true; + } else if (value > round(candidate->co.Y)) { + return false; + } + ++candidate; + } while (candidate != end(Points)); + return false; } // Generate JSON string of this object -string Keyframe::Json() { +std::string Keyframe::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Keyframe::JsonValue() { +// Generate Json::Value for this object +Json::Value Keyframe::JsonValue() const { // Create root json object Json::Value root; root["Points"] = Json::Value(Json::arrayValue); - // loop through points, and find a matching coordinate - for (int x = 0; x < Points.size(); x++) { - // Get each point - Point existing_point = Points[x]; + // loop through points + for (const auto existing_point : Points) { root["Points"].append(existing_point.JsonValue()); } @@ -336,43 +342,30 @@ } // Load JSON string into this object -void Keyframe::SetJson(string value) { +void Keyframe::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Keyframe::SetJsonValue(Json::Value root) { - - // mark as dirty - needs_update = true; - +// Load Json::Value into this object +void Keyframe::SetJsonValue(const Json::Value root) { // Clear 
existing points Points.clear(); if (!root["Points"].isNull()) // loop through points - for (int64_t x = 0; x < root["Points"].size(); x++) { - // Get each point - Json::Value existing_point = root["Points"][(Json::UInt) x]; - + for (const auto existing_point : root["Points"]) { // Create Point Point p; @@ -385,53 +378,121 @@ } // Get the fraction that represents how many times this value is repeated in the curve -Fraction Keyframe::GetRepeatFraction(int64_t index) -{ - // Check if it needs to be processed - if (needs_update) - Process(); - - // Is index a valid point? - if (index >= 0 && index < Values.size()) - // Return value - return Values[index].Repeat(); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return Values[0].Repeat(); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return Values[Values.size() - 1].Repeat(); - else - // return a blank coordinate (0,0) +// This is depreciated and will be removed soon. +Fraction Keyframe::GetRepeatFraction(int64_t index) const { + // Frame numbers (index) outside of the "defined" range of this + // keyframe result in a 1/1 default value. + if (index < 1 || (index + 1) >= GetLength()) { return Fraction(1,1); + } + assert(Points.size() > 1); // Due to ! ((index + 1) >= GetLength) there are at least two points! + + // First, get the value at the given frame and the closest point + // to the right. + int64_t const current_value = GetLong(index); + std::vector::const_iterator const candidate = + std::lower_bound(begin(Points), end(Points), static_cast(index), IsPointBeforeX); + assert(candidate != end(Points)); // Due to the (index + 1) >= GetLength check above! + + // Calculate how many of the next values are going to be the same: + int64_t next_repeats = 0; + std::vector::const_iterator i = candidate; + // If the index (frame number) is the X coordinate of the closest + // point, then look at the segment to the right; the "current" + // segement is not interesting because we're already at the last + // value of it. + if (i->co.X == index) { + ++i; + } + // Skip over "constant" (when rounded) segments. + bool all_constant = true; + for (; i != end(Points); ++i) { + if (current_value != round(i->co.Y)) { + all_constant = false; + break; + } + } + if (! all_constant) { + // Found a point which defines a segment which will give a + // different value than the current value. This means we + // moved at least one segment to the right, thus we cannot be + // at the first point. + assert(i != begin(Points)); + Point const left = *(i - 1); + Point const right = *i; + int64_t change_at; + if (current_value < round(i->co.Y)) { + change_at = SearchBetweenPoints(left, right, current_value, std::less_equal{}); + } else { + assert(current_value > round(i->co.Y)); + change_at = SearchBetweenPoints(left, right, current_value, std::greater_equal{}); + } + next_repeats = change_at - index; + } else { + // All values to the right are the same! + next_repeats = Points.back().co.X - index; + } + + // Now look to the left, to the previous values. + all_constant = true; + i = candidate; + if (i != begin(Points)) { + // The binary search below assumes i to be the left point; + // candidate is the right point of the current segment + // though. So change this if possible. If this branch is NOT + // taken, then we're at/before the first point and all is + // constant! + --i; + } + int64_t previous_repeats = 0; + // Skip over constant (when rounded) segments! 
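GetRepeatFraction() relies on SearchBetweenPoints(), defined earlier in this hunk, which bisects the frame range between two points for the last index whose rounded, interpolated value still satisfies a comparison (std::less_equal, std::greater_equal, and so on). Reduced to a plain monotone function, that "last index passing a check" bisection looks like the following sketch:

    #include <cmath>
    #include <cstdio>
    #include <functional>

    // Largest index in [start, stop] whose rounded value still satisfies
    // check(value, current); assumes the check flips at most once, like the
    // rounded keyframe values between two points.
    template <typename ValueAt, typename Check>
    long long last_index_passing(long long start, long long stop, long long current,
                                 ValueAt value_at, Check check) {
        while (start < stop) {
            const long long mid = (start + stop + 1) / 2;
            if (check(std::llround(value_at(mid)), current))
                start = mid;
            else
                stop = mid - 1;
        }
        return start;
    }

    int main() {
        // Values ramp from 10 to 20 across frames 1..100; the last frame whose
        // rounded value is still <= 12 is frame 25.
        auto ramp = [](long long x) { return 10.0 + (x - 1) * (10.0 / 99.0); };
        printf("%lld\n", last_index_passing(1, 100, 12, ramp, std::less_equal<long long>{}));
        return 0;
    }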
+ for (; i != begin(Points); --i) { + if (current_value != round(i->co.Y)) { + all_constant = false; + break; + } + } + // Special case when skipped until the first point, but the first + // point is actually different. Will not happen if index is + // before the first point! + if (current_value != round(i->co.Y)) { + assert(i != candidate); + all_constant = false; + } + if (! all_constant) { + // There are at least two points, and we're not at the end, + // thus the following is safe! + Point const left = *i; + Point const right = *(i + 1); + int64_t change_at; + if (current_value > round(left.co.Y)) { + change_at = SearchBetweenPoints(left, right, current_value, std::less{}); + } else { + assert(current_value < round(left.co.Y)); + change_at = SearchBetweenPoints(left, right, current_value, std::greater{}); + } + previous_repeats = index - change_at; + } else { + // Every previous value is the same (rounded) as the current + // value. + previous_repeats = index; + } + int64_t total_repeats = previous_repeats + next_repeats; + return Fraction(previous_repeats, total_repeats); } // Get the change in Y value (from the previous Y value) -double Keyframe::GetDelta(int64_t index) -{ - // Check if it needs to be processed - if (needs_update) - Process(); - - // Is index a valid point? - if (index >= 0 && index < Values.size()) - // Return value - return Values[index].Delta(); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return Values[0].Delta(); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return Values[Values.size() - 1].Delta(); - else - // return a blank coordinate (0,0) - return 0.0; +double Keyframe::GetDelta(int64_t index) const { + if (index < 1) return 0; + if (index == 1 && ! Points.empty()) return Points[0].co.Y; + if (index >= GetLength()) return 0; + return GetLong(index) - GetLong(index - 1); } // Get a point at a specific index -Point& Keyframe::GetPoint(int64_t index) { +Point const & Keyframe::GetPoint(int64_t index) const { // Is index a valid point? - if (index >= 0 && index < Points.size()) + if (index >= 0 && index < (int64_t)Points.size()) return Points[index]; else // Invalid index @@ -439,29 +500,22 @@ } // Get the number of values (i.e. coordinates on the X axis) -int64_t Keyframe::GetLength() { - // Check if it needs to be processed - if (needs_update) - Process(); - - // return the size of the Values vector - return Values.size(); +int64_t Keyframe::GetLength() const { + if (Points.empty()) return 0; + if (Points.size() == 1) return 1; + return round(Points.back().co.X) + 1; } // Get the number of points (i.e. # of points) -int64_t Keyframe::GetCount() { +int64_t Keyframe::GetCount() const { - // return the size of the Values vector return Points.size(); } // Remove a point by matching a coordinate void Keyframe::RemovePoint(Point p) { - // mark as dirty - needs_update = true; - // loop through points, and find a matching coordinate - for (int64_t x = 0; x < Points.size(); x++) { + for (std::vector::size_type x = 0; x < Points.size(); x++) { // Get each point Point existing_point = Points[x]; @@ -479,11 +533,8 @@ // Remove a point by index void Keyframe::RemovePoint(int64_t index) { - // mark as dirty - needs_update = true; - // Is index a valid point? 
- if (index >= 0 && index < Points.size()) + if (index >= 0 && index < (int64_t)Points.size()) { // Remove a specific point by index Points.erase(Points.begin() + index); @@ -494,398 +545,54 @@ } void Keyframe::UpdatePoint(int64_t index, Point p) { - // mark as dirty - needs_update = true; - // Remove matching point RemovePoint(index); // Add new point AddPoint(p); - - // Reorder points - ReorderPoints(); } -void Keyframe::PrintPoints() { - // Check if it needs to be processed - if (needs_update) - Process(); - +void Keyframe::PrintPoints() const { cout << fixed << setprecision(4); - for (vector::iterator it = Points.begin(); it != Points.end(); it++) { + for (std::vector::const_iterator it = Points.begin(); it != Points.end(); it++) { Point p = *it; cout << p.co.X << "\t" << p.co.Y << endl; } } -void Keyframe::PrintValues() { - // Check if it needs to be processed - if (needs_update) - Process(); - +void Keyframe::PrintValues() const { cout << fixed << setprecision(4); - cout << "Frame Number (X)\tValue (Y)\tIs Increasing\tRepeat Numerator\tRepeat Denominator\tDelta (Y Difference)" << endl; - - for (vector::iterator it = Values.begin() + 1; it != Values.end(); it++) { - Coordinate c = *it; - cout << long(round(c.X)) << "\t" << c.Y << "\t" << c.IsIncreasing() << "\t" << c.Repeat().num << "\t" << c.Repeat().den << "\t" << c.Delta() << endl; - } -} + cout << "Frame Number (X)\tValue (Y)\tIs Increasing\tRepeat Numerator\tRepeat Denominator\tDelta (Y Difference)\n"; -void Keyframe::Process() { - - #pragma omp critical (keyframe_process) - { - // only process if needed - if (needs_update && Points.size() == 0) { - // Clear all values - Values.clear(); - } - else if (needs_update && Points.size() > 0) - { - // Clear all values - Values.clear(); - - // fill in all values between 1 and 1st point's co.X - Point p1 = Points[0]; - if (Points.size() > 1) - // Fill in previous X values (before 1st point) - for (int64_t x = 0; x < p1.co.X; x++) - Values.push_back(Coordinate(Values.size(), p1.co.Y)); - else - // Add a single value (since we only have 1 point) - Values.push_back(Coordinate(Values.size(), p1.co.Y)); - - // Loop through each pair of points (1 less than the max points). Each - // pair of points is used to process a segment of the keyframe. - Point p2(0, 0); - for (int64_t x = 0; x < Points.size() - 1; x++) { - p1 = Points[x]; - p2 = Points[x + 1]; - - // process segment p1,p2 - ProcessSegment(x, p1, p2); - } - - // Loop through each Value, and set the direction of the coordinate. This is used - // when time mapping, to determine what direction the audio waveforms play. 
- bool increasing = true; - int repeat_count = 1; - int64_t last_value = 0; - for (vector::iterator it = Values.begin() + 1; it != Values.end(); it++) { - int current_value = long(round((*it).Y)); - int64_t next_value = long(round((*it).Y)); - int64_t prev_value = long(round((*it).Y)); - if (it + 1 != Values.end()) - next_value = long(round((*(it + 1)).Y)); - if (it - 1 >= Values.begin()) - prev_value = long(round((*(it - 1)).Y)); - - // Loop forward and look for the next unique value (to determine direction) - for (vector::iterator direction_it = it + 1; direction_it != Values.end(); direction_it++) { - int64_t next = long(round((*direction_it).Y)); - - // Detect direction - if (current_value < next) - { - increasing = true; - break; - } - else if (current_value > next) - { - increasing = false; - break; - } - } - - // Set direction - (*it).IsIncreasing(increasing); - - // Detect repeated Y value - if (current_value == last_value) - // repeated, so increment count - repeat_count++; - else - // reset repeat counter - repeat_count = 1; - - // Detect how many 'more' times it's repeated - int additional_repeats = 0; - for (vector::iterator repeat_it = it + 1; repeat_it != Values.end(); repeat_it++) { - int64_t next = long(round((*repeat_it).Y)); - if (next == current_value) - // repeated, so increment count - additional_repeats++; - else - break; // stop looping - } - - // Set repeat fraction - (*it).Repeat(Fraction(repeat_count, repeat_count + additional_repeats)); - - // Set delta (i.e. different from previous unique Y value) - (*it).Delta(current_value - last_value); - - // track the last value - last_value = current_value; - } - } - - // reset flag - needs_update = false; + for (int64_t i = 1; i < GetLength(); ++i) { + cout << i << "\t" << GetValue(i) << "\t" << IsIncreasing(i) << "\t" ; + cout << GetRepeatFraction(i).num << "\t" << GetRepeatFraction(i).den << "\t" << GetDelta(i) << "\n"; } } -void Keyframe::ProcessSegment(int Segment, Point p1, Point p2) { - // Determine the number of values for this segment - int64_t number_of_values = round(p2.co.X) - round(p1.co.X); - - // Exit function if no values - if (number_of_values == 0) - return; - - // Based on the interpolation mode, fill the "values" vector with the coordinates - // for this segment - switch (p2.interpolation) { - - // Calculate the "values" for this segment in with a LINEAR equation, effectively - // creating a straight line with coordinates. - case LINEAR: { - // Get the difference in value - double current_value = p1.co.Y; - double value_difference = p2.co.Y - p1.co.Y; - double value_increment = 0.0f; - - // Get the increment value, but take into account the - // first segment has 1 extra value - value_increment = value_difference / (double) (number_of_values); - - if (Segment == 0) - // Add an extra value to the first segment - number_of_values++; - else - // If not 1st segment, skip the first value - current_value += value_increment; - - // Add each increment to the values vector - for (int64_t x = 0; x < number_of_values; x++) { - // add value as a coordinate to the "values" vector - Values.push_back(Coordinate(Values.size(), current_value)); - - // increment value - current_value += value_increment; - } - - break; - } - - // Calculate the "values" for this segment using a quadratic Bezier curve. This creates a - // smooth curve. - case BEZIER: { - - // Always increase the number of points by 1 (need all possible points - // to correctly calculate the curve). 
- number_of_values++; - number_of_values *= 4; // We need a higher resolution curve (4X) - - // Diff between points - double X_diff = p2.co.X - p1.co.X; - double Y_diff = p2.co.Y - p1.co.Y; - - vector segment_coordinates; - segment_coordinates.push_back(p1.co); - segment_coordinates.push_back(Coordinate(p1.co.X + (p1.handle_right.X * X_diff), p1.co.Y + (p1.handle_right.Y * Y_diff))); - segment_coordinates.push_back(Coordinate(p1.co.X + (p2.handle_left.X * X_diff), p1.co.Y + (p2.handle_left.Y * Y_diff))); - segment_coordinates.push_back(p2.co); - - vector raw_coordinates; - int64_t npts = segment_coordinates.size(); - int64_t icount, jcount; - double step, t; - double last_x = -1; // small number init, to track the last used x - - // Calculate points on curve - icount = 0; - t = 0; - - step = (double) 1.0 / (number_of_values - 1); - - for (int64_t i1 = 0; i1 < number_of_values; i1++) { - if ((1.0 - t) < 5e-6) - t = 1.0; - - jcount = 0; - - double new_x = 0.0f; - double new_y = 0.0f; - - for (int64_t i = 0; i < npts; i++) { - Coordinate co = segment_coordinates[i]; - double basis = Bernstein(npts - 1, i, t); - new_x += basis * co.X; - new_y += basis * co.Y; - } - - // Add new value to the vector - Coordinate current_value(new_x, new_y); - - // Add all values for 1st segment - raw_coordinates.push_back(current_value); - - // increment counters - icount += 2; - t += step; - } - - // Loop through the raw coordinates, and map them correctly to frame numbers. For example, - // we can't have duplicate X values, since X represents our frame numbers. - int64_t current_frame = p1.co.X; - double current_value = p1.co.Y; - for (int64_t i = 0; i < raw_coordinates.size(); i++) - { - // Get the raw coordinate - Coordinate raw = raw_coordinates[i]; - - if (current_frame == round(raw.X)) - // get value of raw coordinate - current_value = raw.Y; - else - { - // Missing X values (use last known Y values) - int64_t number_of_missing = round(raw.X) - current_frame; - for (int64_t missing = 0; missing < number_of_missing; missing++) - { - // Add new value to the vector - Coordinate new_coord(current_frame, current_value); - - if (Segment == 0 || Segment > 0 && current_frame > p1.co.X) - // Add to "values" vector - Values.push_back(new_coord); - - // Increment frame - current_frame++; - } - - // increment the current value - current_value = raw.Y; - } - } - - // Add final coordinate - Coordinate new_coord(current_frame, current_value); - Values.push_back(new_coord); - - break; - } - - // Calculate the "values" of this segment by maintaining the value of p1 until the - // last point, and then make the value jump to p2. This effectively just jumps - // the value, instead of ramping up or down the value. 
- case CONSTANT: { - - if (Segment == 0) - // first segment has 1 extra value - number_of_values++; - - // Add each increment to the values vector - for (int64_t x = 0; x < number_of_values; x++) { - if (x < (number_of_values - 1)) { - // Not the last value of this segment - // add coordinate to "values" - Values.push_back(Coordinate(Values.size(), p1.co.Y)); - } else { - // This is the last value of this segment - // add coordinate to "values" - Values.push_back(Coordinate(Values.size(), p2.co.Y)); - } - } - break; - } - - } -} - -// Create lookup table for fast factorial calculation -void Keyframe::CreateFactorialTable() { - // Only 4 lookups are needed, because we only support 4 coordinates per curve - FactorialLookup[0] = 1.0; - FactorialLookup[1] = 1.0; - FactorialLookup[2] = 2.0; - FactorialLookup[3] = 6.0; -} - -// Get a factorial for a coordinate -double Keyframe::Factorial(int64_t n) { - assert(n >= 0 && n <= 3); - return FactorialLookup[n]; /* returns the value n! as a SUMORealing point number */ -} - -// Calculate the factorial function for Bernstein basis -double Keyframe::Ni(int64_t n, int64_t i) { - double ni; - double a1 = Factorial(n); - double a2 = Factorial(i); - double a3 = Factorial(n - i); - ni = a1 / (a2 * a3); - return ni; -} - -// Calculate Bernstein basis -double Keyframe::Bernstein(int64_t n, int64_t i, double t) { - double basis; - double ti; /* t^i */ - double tni; /* (1 - t)^i */ - - /* Prevent problems with pow */ - if (t == 0.0 && i == 0) - ti = 1.0; - else - ti = pow(t, i); - - if (n == i && t == 1.0) - tni = 1.0; - else - tni = pow((1 - t), (n - i)); - - // Bernstein basis - basis = Ni(n, i) * ti * tni; - return basis; -} // Scale all points by a percentage (good for evenly lengthening or shortening an openshot::Keyframe) // 1.0 = same size, 1.05 = 5% increase, etc... void Keyframe::ScalePoints(double scale) { - // Loop through each point (skipping the 1st point) - for (int64_t point_index = 0; point_index < Points.size(); point_index++) { - // Skip the 1st point - if (point_index == 0) - continue; + // TODO: What if scale is small so that two points land on the + // same X coordinate? + // TODO: What if scale < 0? + // Loop through each point (skipping the 1st point) + for (std::vector::size_type point_index = 1; point_index < Points.size(); point_index++) { // Scale X value Points[point_index].co.X = round(Points[point_index].co.X * scale); - - // Mark for re-processing - needs_update = true; } } // Flip all the points in this openshot::Keyframe (useful for reversing an effect or transition, etc...) -void Keyframe::FlipPoints() -{ - // Loop through each point - vector FlippedPoints; - for (int64_t point_index = 0, reverse_index = Points.size() - 1; point_index < Points.size(); point_index++, reverse_index--) { +void Keyframe::FlipPoints() { + for (std::vector::size_type point_index = 0, reverse_index = Points.size() - 1; point_index < reverse_index; point_index++, reverse_index--) { // Flip the points - Point p = Points[point_index]; - p.co.Y = Points[reverse_index].co.Y; - FlippedPoints.push_back(p); + using std::swap; + swap(Points[point_index].co.Y, Points[reverse_index].co.Y); + // TODO: check that this has the desired effect even with + // regards to handles! 
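+		// Illustrative example (assuming a keyframe with just two points):
+		// points (1, 0) and (100, 10) become (1, 10) and (100, 0) after
+		// FlipPoints() -- only the Y values are exchanged; the X positions
+		// (and, for now, the interpolation handles) stay where they were.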
} - - // Swap vectors - Points.swap(FlippedPoints); - - // Mark for re-processing - needs_update = true; } diff -Nru libopenshot-0.2.2+dfsg1/src/OpenShotVersion.cpp libopenshot-0.2.5+dfsg1/src/OpenShotVersion.cpp --- libopenshot-0.2.2+dfsg1/src/OpenShotVersion.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/OpenShotVersion.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,38 @@ +/** + * @file + * @brief Source file for GetVersion function + * @author Jonathan Thomas + * @author FeRD (Frank Dana) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "OpenShotVersion.h" + +namespace openshot { + OpenShotVersion GetVersion() { + return openshot::Version; + } +} \ No newline at end of file diff -Nru libopenshot-0.2.2+dfsg1/src/PlayerBase.cpp libopenshot-0.2.5+dfsg1/src/PlayerBase.cpp --- libopenshot-0.2.2+dfsg1/src/PlayerBase.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/PlayerBase.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for PlayerBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -60,12 +63,12 @@ } // Get the current reader, such as a FFmpegReader -ReaderBase* PlayerBase::Reader() { +openshot::ReaderBase* PlayerBase::Reader() { return reader; } // Set the current reader, such as a FFmpegReader -void PlayerBase::Reader(ReaderBase *new_reader) { +void PlayerBase::Reader(openshot::ReaderBase *new_reader) { reader = new_reader; } diff -Nru libopenshot-0.2.2+dfsg1/src/Point.cpp libopenshot-0.2.5+dfsg1/src/Point.cpp --- libopenshot-0.2.2+dfsg1/src/Point.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Point.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Point class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -30,7 +33,7 @@ using namespace std; using namespace openshot; -// Default constructor (defaults to 0,0) +// Default constructor (defaults to 1,0) Point::Point() : interpolation(BEZIER), handle_type(AUTO) { // set new coorinate @@ -40,7 +43,7 @@ Initialize_Handles(); } -// Constructor which creates a single coordinate at X=0 +// Constructor which creates a single coordinate at X=1 Point::Point(float y) : interpolation(CONSTANT), handle_type(AUTO) { // set new coorinate @@ -105,14 +108,14 @@ } // Generate JSON string of this object -string Point::Json() { +std::string Point::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Point::JsonValue() { +// Generate Json::Value for this object +Json::Value Point::JsonValue() const { // Create root json object Json::Value root; @@ -129,30 +132,24 @@ } // Load JSON string into this object -void Point::SetJson(string value) { +void Point::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Point::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Point::SetJsonValue(const Json::Value root) { if (!root["co"].isNull()) co.SetJsonValue(root["co"]); // update coordinate diff -Nru libopenshot-0.2.2+dfsg1/src/Profiles.cpp libopenshot-0.2.5+dfsg1/src/Profiles.cpp --- libopenshot-0.2.2+dfsg1/src/Profiles.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Profiles.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Profile class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -32,7 +35,7 @@ // @brief Constructor for Profile. 
// @param path The folder path / location of a profile file -Profile::Profile(string path) { +Profile::Profile(std::string path) { bool read_file = false; @@ -64,51 +67,51 @@ // Split current line QStringList parts = line.split( "=" ); - string setting = parts[0].toStdString(); - string value = parts[1].toStdString(); + std::string setting = parts[0].toStdString(); + std::string value = parts[1].toStdString(); int value_int = 0; // update struct (based on line number) if (setting == "description") info.description = value; else if (setting == "frame_rate_num") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.fps.num = value_int; } else if (setting == "frame_rate_den") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.fps.den = value_int; } else if (setting == "width") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.width = value_int; } else if (setting == "height") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.height = value_int; } else if (setting == "progressive") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.interlaced_frame = !(bool)value_int; } else if (setting == "sample_aspect_num") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.pixel_ratio.num = value_int; } else if (setting == "sample_aspect_den") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.pixel_ratio.den = value_int; } else if (setting == "display_aspect_num") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.display_ratio.num = value_int; } else if (setting == "display_aspect_den") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.display_ratio.den = value_int; } else if (setting == "colorspace") { - stringstream(value) >> value_int; + std::stringstream(value) >> value_int; info.pixel_format = value_int; } } @@ -117,7 +120,7 @@ } } - catch (exception e) + catch (const std::exception& e) { // Error parsing profile file throw InvalidFile("Profile could not be found or loaded (or is invalid).", path); @@ -130,14 +133,14 @@ } // Generate JSON string of this object -string Profile::Json() { +std::string Profile::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Profile::JsonValue() { +// Generate Json::Value for this object +Json::Value Profile::JsonValue() const { // Create root json object Json::Value root; @@ -160,30 +163,24 @@ } // Load JSON string into this object -void Profile::SetJson(string value) { +void Profile::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Profile::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Profile::SetJsonValue(const Json::Value 
root) { if (!root["height"].isNull()) info.height = root["height"].asInt(); diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/AudioPlaybackThread.cpp libopenshot-0.2.5+dfsg1/src/Qt/AudioPlaybackThread.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/AudioPlaybackThread.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/AudioPlaybackThread.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan Thomas * * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -35,18 +38,44 @@ AudioDeviceManagerSingleton *AudioDeviceManagerSingleton::m_pInstance = NULL; // Create or Get an instance of the device manager singleton - AudioDeviceManagerSingleton *AudioDeviceManagerSingleton::Instance(int numChannels) + AudioDeviceManagerSingleton *AudioDeviceManagerSingleton::Instance() { if (!m_pInstance) { // Create the actual instance of device manager only once m_pInstance = new AudioDeviceManagerSingleton; + // Get preferred audio device name (if any) + juce::String preferred_audio_device = juce::String(Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME.c_str()); + // Initialize audio device only 1 time - m_pInstance->audioDeviceManager.initialise ( + juce::String audio_error = m_pInstance->audioDeviceManager.initialise ( 0, /* number of input channels */ - numChannels, /* number of output channels */ + 2, /* number of output channels */ 0, /* no XML settings.. */ - true /* select default device on failure */); + true, /* select default device on failure */ + preferred_audio_device /* preferredDefaultDeviceName */); + + // Persist any errors detected + if (audio_error.isNotEmpty()) { + m_pInstance->initialise_error = audio_error.toRawUTF8(); + } else { + m_pInstance->initialise_error = ""; + } + + // Get all audio device names + for (int i = 0; i < m_pInstance->audioDeviceManager.getAvailableDeviceTypes().size(); ++i) + { + const AudioIODeviceType* t = m_pInstance->audioDeviceManager.getAvailableDeviceTypes()[i]; + const juce::StringArray deviceNames = t->getDeviceNames (); + + for (int j = 0; j < deviceNames.size (); ++j ) + { + juce::String deviceName = deviceNames[j]; + juce::String typeName = t->getTypeName(); + openshot::AudioDeviceInfo deviceInfo = {deviceName.toRawUTF8(), typeName.toRawUTF8()}; + m_pInstance->audio_device_names.push_back(deviceInfo); + } + } } return m_pInstance; @@ -63,7 +92,7 @@ // Constructor AudioPlaybackThread::AudioPlaybackThread() - : Thread("audio-playback") + : juce::Thread("audio-playback") , player() , transport() , mixer() @@ -82,7 +111,7 @@ } // Set the reader object - void AudioPlaybackThread::Reader(ReaderBase *reader) { + void AudioPlaybackThread::Reader(openshot::ReaderBase *reader) { if (source) source->Reader(reader); else { @@ -103,10 +132,10 @@ } // Get the current frame object (which is filling the buffer) - std::shared_ptr AudioPlaybackThread::getFrame() + std::shared_ptr AudioPlaybackThread::getFrame() { if (source) return source->getFrame(); - return std::shared_ptr(); + return std::shared_ptr(); } // Get the currently playing frame number @@ -142,7 +171,7 @@ // Start new audio device (or get existing one) // Add callback - AudioDeviceManagerSingleton::Instance(numChannels)->audioDeviceManager.addAudioCallback(&player); + 
AudioDeviceManagerSingleton::Instance()->audioDeviceManager.addAudioCallback(&player); // Create TimeSliceThread for audio buffering time_thread.startThread(); @@ -175,7 +204,7 @@ transport.setSource(NULL); player.setSource(NULL); - AudioDeviceManagerSingleton::Instance(0)->audioDeviceManager.removeAudioCallback(&player); + AudioDeviceManagerSingleton::Instance()->audioDeviceManager.removeAudioCallback(&player); // Remove source delete source; diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/demo/main.cpp libopenshot-0.2.5+dfsg1/src/Qt/demo/main.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/demo/main.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/demo/main.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Demo Qt application to test the QtPlayer class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/PlayerDemo.cpp libopenshot-0.2.5+dfsg1/src/Qt/PlayerDemo.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/PlayerDemo.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/PlayerDemo.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Demo QtPlayer application * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -36,7 +39,7 @@ , vbox(new QVBoxLayout(this)) , menu(new QMenuBar(this)) , video(new VideoRenderWidget(this)) - , player(new QtPlayer(video->GetRenderer())) + , player(new openshot::QtPlayer(video->GetRenderer())) { setWindowTitle("OpenShot Player"); @@ -95,7 +98,7 @@ } else if (event->key() == Qt::Key_J) { - cout << "BACKWARD" << player->Speed() - 1 << endl; + std::cout << "BACKWARD" << player->Speed() - 1 << std::endl; if (player->Speed() - 1 != 0) player->Speed(player->Speed() - 1); else @@ -105,7 +108,7 @@ player->Play(); } else if (event->key() == Qt::Key_L) { - cout << "FORWARD" << player->Speed() + 1 << endl; + std::cout << "FORWARD" << player->Speed() + 1 << std::endl; if (player->Speed() + 1 != 0) player->Speed(player->Speed() + 1); else @@ -116,19 +119,19 @@ } else if (event->key() == Qt::Key_Left) { - cout << "FRAME STEP -1" << endl; + std::cout << "FRAME STEP -1" << std::endl; if (player->Speed() != 0) player->Speed(0); player->Seek(player->Position() - 1); } else if (event->key() == Qt::Key_Right) { - cout << "FRAME STEP +1" << endl; + std::cout << "FRAME STEP +1" << std::endl; if (player->Speed() != 0) player->Speed(0); player->Seek(player->Position() + 1); } else if (event->key() == Qt::Key_Escape) { - cout << "QUIT PLAYER" << endl; + std::cout << "QUIT PLAYER" << std::endl; QWidget *pWin = QApplication::activeWindow(); pWin->hide(); diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/PlayerPrivate.cpp libopenshot-0.2.5+dfsg1/src/Qt/PlayerPrivate.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/PlayerPrivate.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/PlayerPrivate.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan 
Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -30,22 +33,22 @@ namespace openshot { - // Constructor - PlayerPrivate::PlayerPrivate(RendererBase *rb) - : renderer(rb), Thread("player"), video_position(1), audio_position(0) - , audioPlayback(new AudioPlaybackThread()) - , videoPlayback(new VideoPlaybackThread(rb)) - , videoCache(new VideoCacheThread()) + // Constructor + PlayerPrivate::PlayerPrivate(openshot::RendererBase *rb) + : renderer(rb), Thread("player"), video_position(1), audio_position(0) + , audioPlayback(new openshot::AudioPlaybackThread()) + , videoPlayback(new openshot::VideoPlaybackThread(rb)) + , videoCache(new openshot::VideoCacheThread()) , speed(1), reader(NULL), last_video_position(1) { } // Destructor PlayerPrivate::~PlayerPrivate() { - stopPlayback(1000); - delete audioPlayback; - delete videoCache; - delete videoPlayback; + stopPlayback(1000); + delete audioPlayback; + delete videoCache; + delete videoPlayback; } // Start thread @@ -135,7 +138,7 @@ } // Get the next displayed frame (based on speed and direction) - std::shared_ptr PlayerPrivate::getFrame() + std::shared_ptr PlayerPrivate::getFrame() { try { // Get the next frame (based on speed) @@ -149,41 +152,39 @@ else { // Update cache on which frame was retrieved - videoCache->current_display_frame = video_position; + videoCache->setCurrentFramePosition(video_position); // return frame from reader return reader->GetFrame(video_position); } - } catch (const ReaderClosed & e) { - // ... - } catch (const TooManySeeks & e) { - // ... - } catch (const OutOfBoundsFrame & e) { - // ... - } - return std::shared_ptr(); + } catch (const ReaderClosed & e) { + // ... + } catch (const TooManySeeks & e) { + // ... + } catch (const OutOfBoundsFrame & e) { + // ... 
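+            // (These exceptions are expected while seeking or shutting
+            // down; they are swallowed here and an empty frame is
+            // returned below.)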
+ } + return std::shared_ptr(); } // Start video/audio playback bool PlayerPrivate::startPlayback() { - if (video_position < 0) return false; + if (video_position < 0) return false; - stopPlayback(-1); - startThread(1); - return true; + stopPlayback(-1); + startThread(1); + return true; } // Stop video/audio playback void PlayerPrivate::stopPlayback(int timeOutMilliseconds) { - if (isThreadRunning()) stopThread(timeOutMilliseconds); - if (audioPlayback->isThreadRunning() && reader->info.has_audio) audioPlayback->stopThread(timeOutMilliseconds); - if (videoCache->isThreadRunning() && reader->info.has_video) videoCache->stopThread(timeOutMilliseconds); - if (videoPlayback->isThreadRunning() && reader->info.has_video) videoPlayback->stopThread(timeOutMilliseconds); - + if (isThreadRunning()) stopThread(timeOutMilliseconds); + if (audioPlayback->isThreadRunning() && reader->info.has_audio) audioPlayback->stopThread(timeOutMilliseconds); + if (videoCache->isThreadRunning() && reader->info.has_video) videoCache->stopThread(timeOutMilliseconds); + if (videoPlayback->isThreadRunning() && reader->info.has_video) videoPlayback->stopThread(timeOutMilliseconds); } - } diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/VideoCacheThread.cpp libopenshot-0.2.5+dfsg1/src/Qt/VideoCacheThread.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/VideoCacheThread.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/VideoCacheThread.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for VideoCacheThread class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,13 +29,14 @@ */ #include "../../include/Qt/VideoCacheThread.h" +#include namespace openshot { // Constructor VideoCacheThread::VideoCacheThread() : Thread("video-cache"), speed(1), is_playing(false), position(1) - , reader(NULL), max_frames(OPEN_MP_NUM_PROCESSORS * 2), current_display_frame(1) + , reader(NULL), max_frames(std::min(OPEN_MP_NUM_PROCESSORS * 8, 64)), current_display_frame(1) { } @@ -90,7 +94,7 @@ try { if (reader) { - ZmqLogger::Instance()->AppendDebugMethod("VideoCacheThread::run (cache frame)", "position", position, "current_display_frame", current_display_frame, "max_frames", max_frames, "needed_frames", (position - current_display_frame), "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("VideoCacheThread::run (cache frame)", "position", position, "current_display_frame", current_display_frame, "max_frames", max_frames, "needed_frames", (position - current_display_frame)); // Force the frame to be generated reader->GetFrame(position); @@ -102,6 +106,12 @@ // Ignore out of bounds frame exceptions } + // Is cache position behind current display frame? 
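+			// (This can happen when playback outruns the read-ahead, e.g.
+			// after a slow decode; skipping forward avoids caching frames
+			// the player has already passed.)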
+ if (position < current_display_frame) { + // Jump ahead + position = current_display_frame; + } + // Increment frame number position++; } diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/VideoPlaybackThread.cpp libopenshot-0.2.5+dfsg1/src/Qt/VideoPlaybackThread.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/VideoPlaybackThread.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/VideoPlaybackThread.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -61,7 +64,7 @@ if (need_render && frame) { // Debug - ZmqLogger::Instance()->AppendDebugMethod("VideoPlaybackThread::run (before render)", "frame->number", frame->number, "need_render", need_render, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("VideoPlaybackThread::run (before render)", "frame->number", frame->number, "need_render", need_render); // Render the frame to the screen renderer->paint(frame); diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/VideoRenderer.cpp libopenshot-0.2.5+dfsg1/src/Qt/VideoRenderer.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/VideoRenderer.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/VideoRenderer.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for VideoRenderer class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/Qt/VideoRenderWidget.cpp libopenshot-0.2.5+dfsg1/src/Qt/VideoRenderWidget.cpp --- libopenshot-0.2.2+dfsg1/src/Qt/VideoRenderWidget.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Qt/VideoRenderWidget.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Video RendererWidget class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/QtHtmlReader.cpp libopenshot-0.2.5+dfsg1/src/QtHtmlReader.cpp --- libopenshot-0.2.2+dfsg1/src/QtHtmlReader.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/QtHtmlReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,255 @@ +/** + * @file + * @brief Source file for QtHtmlReader class + * @author Jonathan Thomas + * @author Sergei Kolesov (jediserg) + * @author Jeff Shillitto (jeffski) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . 
+ * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../include/QtHtmlReader.h" +#include +#include +#include +#include +#include + +using namespace openshot; + +/// Default constructor (blank text) +QtHtmlReader::QtHtmlReader() : width(1024), height(768), x_offset(0), y_offset(0), html(""), css(""), background_color("#000000"), is_open(false), gravity(GRAVITY_CENTER) +{ + // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + Open(); + Close(); +} + +QtHtmlReader::QtHtmlReader(int width, int height, int x_offset, int y_offset, GravityType gravity, std::string html, std::string css, std::string background_color) +: width(width), height(height), x_offset(x_offset), y_offset(y_offset), gravity(gravity), html(html), css(css), background_color(background_color), is_open(false) +{ + // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + Open(); + Close(); +} + +// Open reader +void QtHtmlReader::Open() +{ + // Open reader if not already open + if (!is_open) + { + // create image + image = std::shared_ptr(new QImage(width, height, QImage::Format_RGBA8888)); + image->fill(QColor(background_color.c_str())); + + //start painting + QPainter painter; + if (!painter.begin(image.get())) { + return; + } + + //set background + painter.setBackground(QBrush(background_color.c_str())); + + //draw text + QTextDocument text_document; + + //disable redo/undo stack as not needed + text_document.setUndoRedoEnabled(false); + + //create the HTML/CSS document + text_document.setTextWidth(width); + text_document.setDefaultStyleSheet(css.c_str()); + text_document.setHtml(html.c_str()); + + int td_height = text_document.documentLayout()->documentSize().height(); + + if (gravity == GRAVITY_TOP_LEFT || gravity == GRAVITY_TOP || gravity == GRAVITY_TOP_RIGHT) { + painter.translate(x_offset, y_offset); + } else if (gravity == GRAVITY_LEFT || gravity == GRAVITY_CENTER || gravity == GRAVITY_RIGHT) { + painter.translate(x_offset, (height - td_height) / 2 + y_offset); + } else if (gravity == GRAVITY_BOTTOM_LEFT || gravity == GRAVITY_BOTTOM_RIGHT || gravity == GRAVITY_BOTTOM) { + painter.translate(x_offset, height - td_height + y_offset); + } + + if (gravity == GRAVITY_TOP_LEFT || gravity == GRAVITY_LEFT || gravity == GRAVITY_BOTTOM_LEFT) { + text_document.setDefaultTextOption(QTextOption(Qt::AlignLeft)); + } else if (gravity == GRAVITY_CENTER || gravity == GRAVITY_TOP || gravity == GRAVITY_BOTTOM) { + text_document.setDefaultTextOption(QTextOption(Qt::AlignHCenter)); + } else if (gravity == GRAVITY_TOP_RIGHT || gravity == GRAVITY_RIGHT|| gravity == GRAVITY_BOTTOM_RIGHT) { + text_document.setDefaultTextOption(QTextOption(Qt::AlignRight)); + } + + // Draw image + text_document.drawContents(&painter); + + painter.end(); + + // Update image properties + info.has_audio = false; + info.has_video = true; + 
info.file_size = 0; + info.vcodec = "QImage"; + info.width = width; + info.height = height; + info.pixel_ratio.num = 1; + info.pixel_ratio.den = 1; + info.duration = 60 * 60 * 1; // 1 hour duration + info.fps.num = 30; + info.fps.den = 1; + info.video_timebase.num = 1; + info.video_timebase.den = 30; + info.video_length = round(info.duration * info.fps.ToDouble()); + + // Calculate the DAR (display aspect ratio) + Fraction size(info.width * info.pixel_ratio.num, info.height * info.pixel_ratio.den); + + // Reduce size fraction + size.Reduce(); + + // Set the ratio based on the reduced fraction + info.display_ratio.num = size.num; + info.display_ratio.den = size.den; + + // Mark as "open" + is_open = true; + } +} + +// Close reader +void QtHtmlReader::Close() +{ + // Close all objects, if reader is 'open' + if (is_open) + { + // Mark as "closed" + is_open = false; + + // Delete the image + image.reset(); + + info.vcodec = ""; + info.acodec = ""; + } +} + +// Get an openshot::Frame object for a specific frame number of this reader. +std::shared_ptr QtHtmlReader::GetFrame(int64_t requested_frame) +{ + if (image) + { + // Create or get frame object + std::shared_ptr image_frame(new Frame(requested_frame, image->size().width(), image->size().height(), background_color, 0, 2)); + + // Add Image data to frame + image_frame->AddImage(image); + + // return frame object + return image_frame; + } else { + // return empty frame + std::shared_ptr image_frame(new Frame(1, 640, 480, background_color, 0, 2)); + + // return frame object + return image_frame; + } + +} + +// Generate JSON string of this object +std::string QtHtmlReader::Json() const { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::Value for this object +Json::Value QtHtmlReader::JsonValue() const { + + // Create root json object + Json::Value root = ReaderBase::JsonValue(); // get parent properties + root["type"] = "QtHtmlReader"; + root["width"] = width; + root["height"] = height; + root["x_offset"] = x_offset; + root["y_offset"] = y_offset; + root["html"] = html; + root["css"] = css; + root["background_color"] = background_color; + root["gravity"] = gravity; + + // return JsonValue + return root; +} + +// Load JSON string into this object +void QtHtmlReader::SetJson(const std::string value) { + + // Parse JSON string into JSON objects + try + { + const Json::Value root = openshot::stringToJson(value); + // Set all values that match + SetJsonValue(root); + } + catch (const std::exception& e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); + } +} + +// Load Json::Value into this object +void QtHtmlReader::SetJsonValue(const Json::Value root) { + + // Set parent data + ReaderBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["width"].isNull()) + width = root["width"].asInt(); + if (!root["height"].isNull()) + height = root["height"].asInt(); + if (!root["x_offset"].isNull()) + x_offset = root["x_offset"].asInt(); + if (!root["y_offset"].isNull()) + y_offset = root["y_offset"].asInt(); + if (!root["html"].isNull()) + html = root["html"].asString(); + if (!root["css"].isNull()) + css = root["css"].asString(); + if (!root["background_color"].isNull()) + background_color = root["background_color"].asString(); + if (!root["gravity"].isNull()) + gravity = (GravityType) root["gravity"].asInt(); + + // Re-Open path, and re-init everything (if needed) + if (is_open) + { + Close(); + Open(); + } +} diff 
-Nru libopenshot-0.2.2+dfsg1/src/QtImageReader.cpp libopenshot-0.2.5+dfsg1/src/QtImageReader.cpp --- libopenshot-0.2.2+dfsg1/src/QtImageReader.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/QtImageReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for QtImageReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,40 +29,82 @@ */ #include "../include/QtImageReader.h" +#include "../include/Settings.h" +#include "../include/Clip.h" +#include "../include/CacheMemory.h" +#include +#include +#include + +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. + #include "ResvgQt.h" +#endif using namespace openshot; -QtImageReader::QtImageReader(string path) : path(path), is_open(false) +QtImageReader::QtImageReader(std::string path) : path{QString::fromStdString(path)}, is_open(false) { - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) Open(); Close(); } -QtImageReader::QtImageReader(string path, bool inspect_reader) : path(path), is_open(false) +QtImageReader::QtImageReader(std::string path, bool inspect_reader) : path{QString::fromStdString(path)}, is_open(false) { - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) if (inspect_reader) { Open(); Close(); } } +QtImageReader::~QtImageReader() +{ +} + // Open image file void QtImageReader::Open() { // Open reader if not already open if (!is_open) { - // Attempt to open file - image = std::shared_ptr(new QImage()); - bool success = image->load(QString::fromStdString(path)); + bool success = true; + bool loaded = false; + +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. 
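+    // (When USE_RESVG is not defined, or the file is not an SVG, the
+    // reader falls back to Qt's own QImage loading further below.)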
+ // Only use resvg for files ending in '.svg' or '.svgz' + if (path.toLower().endsWith(".svg") || path.toLower().endsWith(".svgz")) { + + ResvgRenderer renderer(path); + if (renderer.isValid()) { + + image = std::shared_ptr(new QImage(renderer.defaultSize(), QImage::Format_ARGB32_Premultiplied)); + image->fill(Qt::transparent); + + QPainter p(image.get()); + renderer.render(&p); + p.end(); + loaded = true; + } + } +#endif - if (!success) + if (!loaded) { + // Attempt to open file using Qt's build in image processing capabilities + image = std::shared_ptr(new QImage()); + success = image->load(path); + } + + if (!success) { // raise exception - throw InvalidFile("File could not be opened.", path); + throw InvalidFile("File could not be opened.", path.toStdString()); + } - // Set pixel format + // Convert to proper format image = std::shared_ptr(new QImage(image->convertToFormat(QImage::Format_RGBA8888))); // Update image properties @@ -72,7 +117,7 @@ info.height = image->height(); info.pixel_ratio.num = 1; info.pixel_ratio.den = 1; - info.duration = 60 * 60 * 24; // 24 hour duration + info.duration = 60 * 60 * 1; // 1 hour duration info.fps.num = 30; info.fps.den = 1; info.video_timebase.num = 1; @@ -89,6 +134,10 @@ info.display_ratio.num = size.num; info.display_ratio.den = size.den; + // Set current max size + max_size.setWidth(info.width); + max_size.setHeight(info.height); + // Mark as "open" is_open = true; } @@ -102,7 +151,7 @@ { // Mark as "closed" is_open = false; - + // Delete the image image.reset(); @@ -111,114 +160,159 @@ } } -void QtImageReader::SetMaxSize(int width, int height) -{ - // Determine if we need to scale the image (for performance reasons) - // The timeline passes its size to the clips, which pass their size to the readers, and eventually here - // A max_width/max_height = 0 means do not scale (probably because we are scaling the image larger than 100%) - - // Remove cache that is no longer valid (if needed) - if (cached_image && (cached_image->width() != width && cached_image->height() != height)) - // Expire this cache - cached_image.reset(); - - max_width = width; - max_height = height; -} - // Get an openshot::Frame object for a specific frame number of this reader. std::shared_ptr QtImageReader::GetFrame(int64_t requested_frame) { // Check for open reader (or throw exception) if (!is_open) - throw ReaderClosed("The Image is closed. Call Open() before calling this method.", path); + throw ReaderClosed("The Image is closed. Call Open() before calling this method.", path.toStdString()); - if (max_width != 0 && max_height != 0 && max_width < info.width && max_height < info.height) - { - // Scale image smaller (or use a previous scaled image) - if (!cached_image) { - // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(getFrameCriticalSection); + // Create a scoped lock, allowing only a single thread to run the following code at one time + const GenericScopedLock lock(getFrameCriticalSection); + + // Determine the max size of this source image (based on the timeline's size, the scaling mode, + // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, + // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline + // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in + // the future. 
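+	// (If the global Settings MAX_WIDTH / MAX_HEIGHT are unset (<= 0),
+	// the code below falls back to the source image's own dimensions,
+	// i.e. no downscaling is applied.)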
+ int max_width = Settings::Instance()->MAX_WIDTH; + if (max_width <= 0) + max_width = info.width; + int max_height = Settings::Instance()->MAX_HEIGHT; + if (max_height <= 0) + max_height = info.height; + + Clip* parent = (Clip*) GetClip(); + if (parent) { + if (parent->scale == SCALE_FIT || parent->scale == SCALE_STRETCH) { + // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + max_width = std::max(float(max_width), max_width * max_scale_x); + max_height = std::max(float(max_height), max_height * max_scale_y); + + } else if (parent->scale == SCALE_CROP) { + // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + QSize width_size(max_width * max_scale_x, + round(max_width / (float(info.width) / float(info.height)))); + QSize height_size(round(max_height / (float(info.height) / float(info.width))), + max_height * max_scale_y); + // respect aspect ratio + if (width_size.width() >= max_width && width_size.height() >= max_height) { + max_width = std::max(max_width, width_size.width()); + max_height = std::max(max_height, width_size.height()); + } + else { + max_width = std::max(max_width, height_size.width()); + max_height = std::max(max_height, height_size.height()); + } + + } else { + // No scaling, use original image size (slower) + max_width = info.width; + max_height = info.height; + } + } + + // Scale image smaller (or use a previous scaled image) + if (!cached_image || (max_size.width() != max_width || max_size.height() != max_height)) { + bool rendered = false; +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. 
+ // Only use resvg for files ending in '.svg' or '.svgz' + if (path.toLower().endsWith(".svg") || path.toLower().endsWith(".svgz")) { + + ResvgRenderer renderer(path); + if (renderer.isValid()) { + // Scale SVG size to keep aspect ratio, and fill the max_size as best as possible + QSize svg_size(renderer.defaultSize().width(), renderer.defaultSize().height()); + svg_size.scale(max_width, max_height, Qt::KeepAspectRatio); + + // Create empty QImage + cached_image = std::shared_ptr(new QImage(QSize(svg_size.width(), svg_size.height()), QImage::Format_ARGB32_Premultiplied)); + cached_image->fill(Qt::transparent); + + // Render SVG into QImage + QPainter p(cached_image.get()); + renderer.render(&p); + p.end(); + rendered = true; + } + } +#endif + + if (!rendered) { // We need to resize the original image to a smaller image (for performance reasons) // Only do this once, to prevent tons of unneeded scaling operations cached_image = std::shared_ptr(new QImage(image->scaled(max_width, max_height, Qt::KeepAspectRatio, Qt::SmoothTransformation))); - cached_image = std::shared_ptr(new QImage(cached_image->convertToFormat(QImage::Format_RGBA8888))); } - // Create or get frame object - std::shared_ptr image_frame(new Frame(requested_frame, cached_image->width(), cached_image->height(), "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); - - // Add Image data to frame - image_frame->AddImage(cached_image); + cached_image = std::shared_ptr(new QImage(cached_image->convertToFormat(QImage::Format_RGBA8888))); - // return frame object - return image_frame; + // Set max size (to later determine if max_size is changed) + max_size.setWidth(max_width); + max_size.setHeight(max_height); + } - } else { - // Use original image (higher quality but slower) - // Create or get frame object - std::shared_ptr image_frame(new Frame(requested_frame, info.width, info.height, "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); + // Create or get frame object + std::shared_ptr image_frame(new Frame(requested_frame, cached_image->width(), cached_image->height(), "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); - // Add Image data to frame - image_frame->AddImage(image); + // Add Image data to frame + image_frame->AddImage(cached_image); - // return frame object - return image_frame; - } + // return frame object + return image_frame; } // Generate JSON string of this object -string QtImageReader::Json() { +std::string QtImageReader::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value QtImageReader::JsonValue() { +// Generate Json::Value for this object +Json::Value QtImageReader::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties root["type"] = "QtImageReader"; - root["path"] = path; + root["path"] = path.toStdString(); // return JsonValue return root; } // Load JSON string into this object -void QtImageReader::SetJson(string value) { +void QtImageReader::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // 
Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void QtImageReader::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void QtImageReader::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); // Set data from Json (if key is found) if (!root["path"].isNull()) - path = root["path"].asString(); + path = QString::fromStdString(root["path"].asString()); // Re-Open path, and re-init everything (if needed) if (is_open) diff -Nru libopenshot-0.2.2+dfsg1/src/QtPlayer.cpp libopenshot-0.2.5+dfsg1/src/QtPlayer.cpp --- libopenshot-0.2.2+dfsg1/src/QtPlayer.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/QtPlayer.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -4,9 +4,12 @@ * @author Duzy Chan * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -33,169 +36,189 @@ #include "../include/Qt/PlayerPrivate.h" #include "../include/Qt/VideoRenderer.h" -using namespace openshot; - -QtPlayer::QtPlayer() : PlayerBase(), p(new PlayerPrivate(new VideoRenderer())), threads_started(false) -{ - reader = NULL; -} - -QtPlayer::QtPlayer(RendererBase *rb) : PlayerBase(), p(new PlayerPrivate(rb)), threads_started(false) -{ - reader = NULL; -} - -QtPlayer::~QtPlayer() -{ - if (mode != PLAYBACK_STOPPED) - Stop(); - - delete p; -} - -void QtPlayer::CloseAudioDevice() -{ - // Close audio device (only do this once, when all audio playback is finished) - AudioDeviceManagerSingleton::Instance(0)->CloseAudioDevice(); -} - -void QtPlayer::SetSource(const std::string &source) -{ - FFmpegReader *ffreader = new FFmpegReader(source); - ffreader->DisplayInfo(); - - //reader = new FrameMapper(ffreader, ffreader->info.fps, PULLDOWN_NONE, ffreader->info.sample_rate, ffreader->info.channels, ffreader->info.channel_layout); - reader = new Timeline(ffreader->info.width, ffreader->info.height, ffreader->info.fps, ffreader->info.sample_rate, ffreader->info.channels, ffreader->info.channel_layout); - Clip *c = new Clip(source); - - Timeline* tm = (Timeline*)reader; - tm->AddClip(c); - tm->Open(); - -// ZmqLogger::Instance()->Path("/home/jonathan/.openshot_qt/libopenshot.log"); -// ZmqLogger::Instance()->Enable(true); - - // Set the reader - Reader(reader); -} - -void QtPlayer::Play() -{ - // Set mode to playing, and speed to normal - mode = PLAYBACK_PLAY; - Speed(1); - - if (reader && !threads_started) { - // Start thread only once - p->startPlayback(); - threads_started = true; - } -} - -void QtPlayer::Loading() -{ - mode = PLAYBACK_LOADING; -} - -/// Get the current mode -PlaybackMode QtPlayer::Mode() -{ - return mode; -} - -void QtPlayer::Pause() -{ - mode = PLAYBACK_PAUSED; - Speed(0); -} - -int QtPlayer::Position() -{ - return p->video_position; -} - -void QtPlayer::Seek(int64_t new_frame) -{ - // Check for seek - if (reader && threads_started && new_frame > 0) { - // Notify cache thread that seek has occurred - p->videoCache->Seek(new_frame); - - // Update current 
position - p->video_position = new_frame; - - // Clear last position (to force refresh) - p->last_video_position = 0; - - // Notify audio thread that seek has occurred - p->audioPlayback->Seek(new_frame); - } -} - -void QtPlayer::Stop() -{ - // Change mode to stopped - mode = PLAYBACK_STOPPED; - - // Notify threads of stopping - if (reader && threads_started) { - p->videoCache->Stop(); - p->audioPlayback->Stop(); - - // Kill all threads - p->stopPlayback(); - } - - p->video_position = 0; - threads_started = false; -} - -// Set the reader object -void QtPlayer::Reader(ReaderBase *new_reader) +namespace openshot { - // Set new reader. Note: Be sure to close and dispose of the old reader after calling this - reader = new_reader; - p->reader = new_reader; - p->videoCache->Reader(new_reader); - p->audioPlayback->Reader(new_reader); -} - -// Get the current reader, such as a FFmpegReader -ReaderBase* QtPlayer::Reader() { - return reader; -} - -// Set the QWidget pointer to display the video on (as a LONG pointer id) -void QtPlayer::SetQWidget(int64_t qwidget_address) { - // Update override QWidget address on the video renderer - p->renderer->OverrideWidget(qwidget_address); -} - -// Get the Renderer pointer address (for Python to cast back into a QObject) -int64_t QtPlayer::GetRendererQObject() { - return (int64_t)(VideoRenderer*)p->renderer; -} - -// Get the Playback speed -float QtPlayer::Speed() { - return speed; -} - -// Set the Playback speed multiplier (1.0 = normal speed, <1.0 = slower, >1.0 faster) -void QtPlayer::Speed(float new_speed) { - speed = new_speed; - p->speed = new_speed; - p->videoCache->setSpeed(new_speed); - if (p->reader->info.has_audio) - p->audioPlayback->setSpeed(new_speed); -} - -// Get the Volume -float QtPlayer::Volume() { - return volume; -} - -// Set the Volume multiplier (1.0 = normal volume, <1.0 = quieter, >1.0 louder) -void QtPlayer::Volume(float new_volume) { - volume = new_volume; -} + // Delegating constructor + QtPlayer::QtPlayer() + : QtPlayer::QtPlayer(new VideoRenderer()) + { } + + // Constructor + QtPlayer::QtPlayer(openshot::RendererBase *rb) + : PlayerBase() + , p(new openshot::PlayerPrivate(rb)) + , threads_started(false) + { + reader = NULL; + } + + QtPlayer::~QtPlayer() + { + if (mode != PLAYBACK_STOPPED) + Stop(); + + delete p; + } + + void QtPlayer::CloseAudioDevice() + { + // Close audio device (only do this once, when all audio playback is finished) + openshot::AudioDeviceManagerSingleton::Instance()->CloseAudioDevice(); + } + + // Return any error string during initialization + std::string QtPlayer::GetError() { + if (reader && threads_started) { + // Get error from audio thread (if any) + return p->audioPlayback->getError(); + } else { + return ""; + } + } + + /// Get Audio Devices from JUCE + std::vector QtPlayer::GetAudioDeviceNames() { + if (reader && threads_started) { + return p->audioPlayback->getAudioDeviceNames(); + } else { + return std::vector(); + } + } + + void QtPlayer::SetSource(const std::string &source) + { + FFmpegReader *ffreader = new FFmpegReader(source); + ffreader->DisplayInfo(); + + reader = new Timeline(ffreader->info.width, ffreader->info.height, ffreader->info.fps, ffreader->info.sample_rate, ffreader->info.channels, ffreader->info.channel_layout); + Clip *c = new Clip(source); + + Timeline* tm = (Timeline*)reader; + tm->AddClip(c); + tm->Open(); + + // Set the reader + Reader(reader); + } + + void QtPlayer::Play() + { + // Set mode to playing, and speed to normal + mode = PLAYBACK_PLAY; + Speed(1); + + if (reader && 
!threads_started) { + // Start thread only once + p->startPlayback(); + threads_started = true; + } + } + + void QtPlayer::Loading() + { + mode = PLAYBACK_LOADING; + } + + /// Get the current mode + openshot::PlaybackMode QtPlayer::Mode() + { + return mode; + } + + void QtPlayer::Pause() + { + mode = PLAYBACK_PAUSED; + Speed(0); + } + + int64_t QtPlayer::Position() + { + return p->video_position; + } + + void QtPlayer::Seek(int64_t new_frame) + { + // Check for seek + if (reader && threads_started && new_frame > 0) { + // Notify cache thread that seek has occurred + p->videoCache->Seek(new_frame); + + // Update current position + p->video_position = new_frame; + + // Clear last position (to force refresh) + p->last_video_position = 0; + + // Notify audio thread that seek has occurred + p->audioPlayback->Seek(new_frame); + } + } + + void QtPlayer::Stop() + { + // Change mode to stopped + mode = PLAYBACK_STOPPED; + + // Notify threads of stopping + if (reader && threads_started) { + p->videoCache->Stop(); + p->audioPlayback->Stop(); + + // Kill all threads + p->stopPlayback(); + } + + p->video_position = 0; + threads_started = false; + } + + // Set the reader object + void QtPlayer::Reader(openshot::ReaderBase *new_reader) + { + // Set new reader. Note: Be sure to close and dispose of the old reader after calling this + reader = new_reader; + p->reader = new_reader; + p->videoCache->Reader(new_reader); + p->audioPlayback->Reader(new_reader); + } + + // Get the current reader, such as a FFmpegReader + openshot::ReaderBase* QtPlayer::Reader() { + return reader; + } + + // Set the QWidget pointer to display the video on (as a LONG pointer id) + void QtPlayer::SetQWidget(int64_t qwidget_address) { + // Update override QWidget address on the video renderer + p->renderer->OverrideWidget(qwidget_address); + } + + // Get the Renderer pointer address (for Python to cast back into a QObject) + int64_t QtPlayer::GetRendererQObject() { + return (int64_t)(VideoRenderer*)p->renderer; + } + + // Get the Playback speed + float QtPlayer::Speed() { + return speed; + } + + // Set the Playback speed multiplier (1.0 = normal speed, <1.0 = slower, >1.0 faster) + void QtPlayer::Speed(float new_speed) { + speed = new_speed; + p->speed = new_speed; + p->videoCache->setSpeed(new_speed); + if (p->reader->info.has_audio) + p->audioPlayback->setSpeed(new_speed); + } + + // Get the Volume + float QtPlayer::Volume() { + return volume; + } + + // Set the Volume multiplier (1.0 = normal volume, <1.0 = quieter, >1.0 louder) + void QtPlayer::Volume(float new_volume) { + volume = new_volume; + } +} \ No newline at end of file diff -Nru libopenshot-0.2.2+dfsg1/src/QtTextReader.cpp libopenshot-0.2.5+dfsg1/src/QtTextReader.cpp --- libopenshot-0.2.2+dfsg1/src/QtTextReader.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/QtTextReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,278 @@ +/** + * @file + * @brief Source file for QtTextReader class + * @author Jonathan Thomas + * @author Sergei Kolesov (jediserg) + * @author Jeff Shillitto (jeffski) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . 
+ * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../include/QtTextReader.h" +#include +#include + +using namespace openshot; + +/// Default constructor (blank text) +QtTextReader::QtTextReader() : width(1024), height(768), x_offset(0), y_offset(0), text(""), font(QFont("Arial", 10)), text_color("#ffffff"), background_color("#000000"), is_open(false), gravity(GRAVITY_CENTER) +{ + // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + Open(); + Close(); +} + +QtTextReader::QtTextReader(int width, int height, int x_offset, int y_offset, GravityType gravity, std::string text, QFont font, std::string text_color, std::string background_color) +: width(width), height(height), x_offset(x_offset), y_offset(y_offset), text(text), font(font), text_color(text_color), background_color(background_color), is_open(false), gravity(gravity) +{ + // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + Open(); + Close(); +} + +void QtTextReader::SetTextBackgroundColor(std::string color) { + text_background_color = color; + + // Open and Close the reader, to populate it's attributes (such as height, width, etc...) 
plus the text background color + Open(); + Close(); +} + +// Open reader +void QtTextReader::Open() +{ + // Open reader if not already open + if (!is_open) + { + // create image + image = std::shared_ptr(new QImage(width, height, QImage::Format_RGBA8888)); + image->fill(QColor(background_color.c_str())); + + QPainter painter; + if (!painter.begin(image.get())) { + return; + } + + // set background + if (!text_background_color.empty()) { + painter.setBackgroundMode(Qt::OpaqueMode); + painter.setBackground(QBrush(text_background_color.c_str())); + } + + // set font color + painter.setPen(QPen(text_color.c_str())); + + // set font + painter.setFont(font); + + // Set gravity (map between OpenShot and Qt) + int align_flag = 0; + switch (gravity) + { + case GRAVITY_TOP_LEFT: + align_flag = Qt::AlignLeft | Qt::AlignTop; + break; + case GRAVITY_TOP: + align_flag = Qt::AlignHCenter | Qt::AlignTop; + break; + case GRAVITY_TOP_RIGHT: + align_flag = Qt::AlignRight | Qt::AlignTop; + break; + case GRAVITY_LEFT: + align_flag = Qt::AlignVCenter | Qt::AlignLeft; + break; + case GRAVITY_CENTER: + align_flag = Qt::AlignCenter; + break; + case GRAVITY_RIGHT: + align_flag = Qt::AlignVCenter | Qt::AlignRight; + break; + case GRAVITY_BOTTOM_LEFT: + align_flag = Qt::AlignLeft | Qt::AlignBottom; + break; + case GRAVITY_BOTTOM: + align_flag = Qt::AlignHCenter | Qt::AlignBottom; + break; + case GRAVITY_BOTTOM_RIGHT: + align_flag = Qt::AlignRight | Qt::AlignBottom; + break; + } + + // Draw image + painter.drawText(x_offset, y_offset, width, height, align_flag, text.c_str()); + + painter.end(); + + // Update image properties + info.has_audio = false; + info.has_video = true; + info.file_size = 0; + info.vcodec = "QImage"; + info.width = width; + info.height = height; + info.pixel_ratio.num = 1; + info.pixel_ratio.den = 1; + info.duration = 60 * 60 * 1; // 1 hour duration + info.fps.num = 30; + info.fps.den = 1; + info.video_timebase.num = 1; + info.video_timebase.den = 30; + info.video_length = round(info.duration * info.fps.ToDouble()); + + // Calculate the DAR (display aspect ratio) + Fraction font_size(info.width * info.pixel_ratio.num, info.height * info.pixel_ratio.den); + + // Reduce size fraction + font_size.Reduce(); + + // Set the ratio based on the reduced fraction + info.display_ratio.num = font_size.num; + info.display_ratio.den = font_size.den; + + // Mark as "open" + is_open = true; + } +} + +// Close reader +void QtTextReader::Close() +{ + // Close all objects, if reader is 'open' + if (is_open) + { + // Mark as "closed" + is_open = false; + + // Delete the image + image.reset(); + + info.vcodec = ""; + info.acodec = ""; + } +} + +// Get an openshot::Frame object for a specific frame number of this reader. 
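[Editor's note, not part of the patch] The new QtTextReader::Open() above fills a QImage with the background color, maps the OpenShot gravity value onto Qt alignment flags, and draws the text with QPainter before recording the image properties. A minimal standalone sketch of that drawing approach follows; it is illustrative only, assumes Qt 5 headers are available, and the "Arial" font and output file name are hypothetical choices:

    #include <QGuiApplication>
    #include <QColor>
    #include <QFont>
    #include <QImage>
    #include <QPainter>
    #include <QPen>

    int main(int argc, char *argv[])
    {
        // Qt's font database needs a QGuiApplication before text can be drawn.
        QGuiApplication app(argc, argv);

        // Blank canvas in the same pixel format the reader allocates.
        QImage image(1024, 768, QImage::Format_RGBA8888);
        image.fill(QColor("#000000"));

        QPainter painter(&image);
        painter.setPen(QPen(QColor("#ffffff")));
        painter.setFont(QFont("Arial", 10));

        // GRAVITY_CENTER corresponds to Qt::AlignCenter in the reader's mapping.
        painter.drawText(0, 0, image.width(), image.height(),
                         Qt::AlignCenter, "Hello, OpenShot");
        painter.end();

        // Hypothetical output path, just to inspect the result.
        image.save("text_frame.png");
        return 0;
    }
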
+std::shared_ptr QtTextReader::GetFrame(int64_t requested_frame) +{ + if (image) + { + // Create or get frame object + std::shared_ptr image_frame(new Frame(requested_frame, image->size().width(), image->size().height(), background_color, 0, 2)); + + // Add Image data to frame + image_frame->AddImage(image); + + // return frame object + return image_frame; + } else { + // return empty frame + std::shared_ptr image_frame(new Frame(1, 640, 480, background_color, 0, 2)); + + // return frame object + return image_frame; + } + +} + +// Generate JSON string of this object +std::string QtTextReader::Json() const { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::Value for this object +Json::Value QtTextReader::JsonValue() const { + + // Create root json object + Json::Value root = ReaderBase::JsonValue(); // get parent properties + root["type"] = "QtTextReader"; + root["width"] = width; + root["height"] = height; + root["x_offset"] = x_offset; + root["y_offset"] = y_offset; + root["text"] = text; + root["font"] = font.toString().toStdString(); + root["text_color"] = text_color; + root["background_color"] = background_color; + root["text_background_color"] = text_background_color; + root["gravity"] = gravity; + + // return JsonValue + return root; +} + +// Load JSON string into this object +void QtTextReader::SetJson(const std::string value) { + + // Parse JSON string into JSON objects + try + { + const Json::Value root = openshot::stringToJson(value); + // Set all values that match + SetJsonValue(root); + } + catch (const std::exception& e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); + } +} + +// Load Json::Value into this object +void QtTextReader::SetJsonValue(const Json::Value root) { + + // Set parent data + ReaderBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["width"].isNull()) + width = root["width"].asInt(); + if (!root["height"].isNull()) + height = root["height"].asInt(); + if (!root["x_offset"].isNull()) + x_offset = root["x_offset"].asInt(); + if (!root["y_offset"].isNull()) + y_offset = root["y_offset"].asInt(); + if (!root["text"].isNull()) + text = root["text"].asString(); + if (!root["font"].isNull()) + font.fromString(QString::fromStdString(root["font"].asString())); + if (!root["text_color"].isNull()) + text_color = root["text_color"].asString(); + if (!root["background_color"].isNull()) + background_color = root["background_color"].asString(); + if (!root["text_background_color"].isNull()) + text_background_color = root["text_background_color"].asString(); + if (!root["gravity"].isNull()) + gravity = (GravityType) root["gravity"].asInt(); + + // Re-Open path, and re-init everything (if needed) + if (is_open) + { + Close(); + Open(); + } +} diff -Nru libopenshot-0.2.2+dfsg1/src/ReaderBase.cpp libopenshot-0.2.5+dfsg1/src/ReaderBase.cpp --- libopenshot-0.2.2+dfsg1/src/ReaderBase.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/ReaderBase.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for ReaderBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -58,59 +61,59 @@ info.channel_layout = LAYOUT_MONO; info.audio_stream_index = -1; info.audio_timebase = Fraction(); - max_width = 0; - max_height = 0; + + // Init parent clip + parent = NULL; } // Display file information void ReaderBase::DisplayInfo() { - cout << fixed << setprecision(2) << boolalpha; - cout << "----------------------------" << endl; - cout << "----- File Information -----" << endl; - cout << "----------------------------" << endl; - cout << "--> Has Video: " << info.has_video << endl; - cout << "--> Has Audio: " << info.has_audio << endl; - cout << "--> Has Single Image: " << info.has_single_image << endl; - cout << "--> Duration: " << info.duration << " Seconds" << endl; - cout << "--> File Size: " << double(info.file_size) / 1024 / 1024 << " MB" << endl; - cout << "----------------------------" << endl; - cout << "----- Video Attributes -----" << endl; - cout << "----------------------------" << endl; - cout << "--> Width: " << info.width << endl; - cout << "--> Height: " << info.height << endl; - cout << "--> Pixel Format: " << info.pixel_format << endl; - cout << "--> Frames Per Second: " << info.fps.ToDouble() << " (" << info.fps.num << "/" << info.fps.den << ")" << endl; - cout << "--> Video Bit Rate: " << info.video_bit_rate/1000 << " kb/s" << endl; - cout << "--> Pixel Ratio: " << info.pixel_ratio.ToDouble() << " (" << info.pixel_ratio.num << "/" << info.pixel_ratio.den << ")" << endl; - cout << "--> Display Aspect Ratio: " << info.display_ratio.ToDouble() << " (" << info.display_ratio.num << "/" << info.display_ratio.den << ")" << endl; - cout << "--> Video Codec: " << info.vcodec << endl; - cout << "--> Video Length: " << info.video_length << " Frames" << endl; - cout << "--> Video Stream Index: " << info.video_stream_index << endl; - cout << "--> Video Timebase: " << info.video_timebase.ToDouble() << " (" << info.video_timebase.num << "/" << info.video_timebase.den << ")" << endl; - cout << "--> Interlaced: " << info.interlaced_frame << endl; - cout << "--> Interlaced: Top Field First: " << info.top_field_first << endl; - cout << "----------------------------" << endl; - cout << "----- Audio Attributes -----" << endl; - cout << "----------------------------" << endl; - cout << "--> Audio Codec: " << info.acodec << endl; - cout << "--> Audio Bit Rate: " << info.audio_bit_rate/1000 << " kb/s" << endl; - cout << "--> Sample Rate: " << info.sample_rate << " Hz" << endl; - cout << "--> # of Channels: " << info.channels << endl; - cout << "--> Channel Layout: " << info.channel_layout << endl; - cout << "--> Audio Stream Index: " << info.audio_stream_index << endl; - cout << "--> Audio Timebase: " << info.audio_timebase.ToDouble() << " (" << info.audio_timebase.num << "/" << info.audio_timebase.den << ")" << endl; - cout << "----------------------------" << endl; - cout << "--------- Metadata ---------" << endl; - cout << "----------------------------" << endl; + std::cout << std::fixed << std::setprecision(2) << std::boolalpha; + std::cout << "----------------------------" << std::endl; + std::cout << "----- File Information -----" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--> Has Video: " << info.has_video << std::endl; + std::cout << "--> Has Audio: " << info.has_audio << std::endl; + std::cout << "--> Has Single Image: " << info.has_single_image << 
std::endl; + std::cout << "--> Duration: " << info.duration << " Seconds" << std::endl; + std::cout << "--> File Size: " << double(info.file_size) / 1024 / 1024 << " MB" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "----- Video Attributes -----" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--> Width: " << info.width << std::endl; + std::cout << "--> Height: " << info.height << std::endl; + std::cout << "--> Pixel Format: " << info.pixel_format << std::endl; + std::cout << "--> Frames Per Second: " << info.fps.ToDouble() << " (" << info.fps.num << "/" << info.fps.den << ")" << std::endl; + std::cout << "--> Video Bit Rate: " << info.video_bit_rate/1000 << " kb/s" << std::endl; + std::cout << "--> Pixel Ratio: " << info.pixel_ratio.ToDouble() << " (" << info.pixel_ratio.num << "/" << info.pixel_ratio.den << ")" << std::endl; + std::cout << "--> Display Aspect Ratio: " << info.display_ratio.ToDouble() << " (" << info.display_ratio.num << "/" << info.display_ratio.den << ")" << std::endl; + std::cout << "--> Video Codec: " << info.vcodec << std::endl; + std::cout << "--> Video Length: " << info.video_length << " Frames" << std::endl; + std::cout << "--> Video Stream Index: " << info.video_stream_index << std::endl; + std::cout << "--> Video Timebase: " << info.video_timebase.ToDouble() << " (" << info.video_timebase.num << "/" << info.video_timebase.den << ")" << std::endl; + std::cout << "--> Interlaced: " << info.interlaced_frame << std::endl; + std::cout << "--> Interlaced: Top Field First: " << info.top_field_first << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "----- Audio Attributes -----" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--> Audio Codec: " << info.acodec << std::endl; + std::cout << "--> Audio Bit Rate: " << info.audio_bit_rate/1000 << " kb/s" << std::endl; + std::cout << "--> Sample Rate: " << info.sample_rate << " Hz" << std::endl; + std::cout << "--> # of Channels: " << info.channels << std::endl; + std::cout << "--> Channel Layout: " << info.channel_layout << std::endl; + std::cout << "--> Audio Stream Index: " << info.audio_stream_index << std::endl; + std::cout << "--> Audio Timebase: " << info.audio_timebase.ToDouble() << " (" << info.audio_timebase.num << "/" << info.audio_timebase.den << ")" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--------- Metadata ---------" << std::endl; + std::cout << "----------------------------" << std::endl; // Iterate through metadata - map::iterator it; - for (it = info.metadata.begin(); it != info.metadata.end(); it++) - cout << "--> " << it->first << ": " << it->second << endl; + for (auto it : info.metadata) + std::cout << "--> " << it.first << ": " << it.second << std::endl; } -// Generate Json::JsonValue for this object -Json::Value ReaderBase::JsonValue() { +// Generate Json::Value for this object +Json::Value ReaderBase::JsonValue() const { // Create root json object Json::Value root; @@ -118,7 +121,7 @@ root["has_audio"] = info.has_audio; root["has_single_image"] = info.has_single_image; root["duration"] = info.duration; - stringstream filesize_stream; + std::stringstream filesize_stream; filesize_stream << info.file_size; root["file_size"] = filesize_stream.str(); root["height"] = info.height; @@ -135,7 +138,7 @@ root["display_ratio"]["num"] = info.display_ratio.num; root["display_ratio"]["den"] = 
info.display_ratio.den; root["vcodec"] = info.vcodec; - stringstream video_length_stream; + std::stringstream video_length_stream; video_length_stream << info.video_length; root["video_length"] = video_length_stream.str(); root["video_stream_index"] = info.video_stream_index; @@ -156,16 +159,16 @@ // Append metadata map root["metadata"] = Json::Value(Json::objectValue); - map::iterator it; - for (it = info.metadata.begin(); it != info.metadata.end(); it++) - root["metadata"][it->first] = it->second; + + for (const auto it : info.metadata) + root["metadata"][it.first] = it.second; // return JsonValue return root; } -// Load Json::JsonValue into this object -void ReaderBase::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void ReaderBase::SetJsonValue(const Json::Value root) { // Set data from Json (if key is found) if (!root["has_video"].isNull()) @@ -177,7 +180,7 @@ if (!root["duration"].isNull()) info.duration = root["duration"].asDouble(); if (!root["file_size"].isNull()) - info.file_size = atoll(root["file_size"].asString().c_str()); + info.file_size = std::stoll(root["file_size"].asString()); if (!root["height"].isNull()) info.height = root["height"].asInt(); if (!root["width"].isNull()) @@ -207,7 +210,7 @@ if (!root["vcodec"].isNull()) info.vcodec = root["vcodec"].asString(); if (!root["video_length"].isNull()) - info.video_length = atoll(root["video_length"].asString().c_str()); + info.video_length = std::stoll(root["video_length"].asString()); if (!root["video_stream_index"].isNull()) info.video_stream_index = root["video_stream_index"].asInt(); if (!root["video_timebase"].isNull() && root["video_timebase"].isObject()) { @@ -240,9 +243,19 @@ info.audio_timebase.den = root["audio_timebase"]["den"].asInt(); } if (!root["metadata"].isNull() && root["metadata"].isObject()) { - for( Json::Value::iterator itr = root["metadata"].begin() ; itr != root["metadata"].end() ; itr++ ) { - string key = itr.key().asString(); + for( Json::Value::const_iterator itr = root["metadata"].begin() ; itr != root["metadata"].end() ; itr++ ) { + std::string key = itr.key().asString(); info.metadata[key] = root["metadata"][key].asString(); } } } + +/// Parent clip object of this reader (which can be unparented and NULL) +openshot::ClipBase* ReaderBase::GetClip() { + return parent; +} + +/// Set parent clip object of this reader +void ReaderBase::SetClip(openshot::ClipBase* clip) { + parent = clip; +} diff -Nru libopenshot-0.2.2+dfsg1/src/RendererBase.cpp libopenshot-0.2.5+dfsg1/src/RendererBase.cpp --- libopenshot-0.2.2+dfsg1/src/RendererBase.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/RendererBase.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for RendererBase class * @author Duzy Chan * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the diff -Nru libopenshot-0.2.2+dfsg1/src/Settings.cpp libopenshot-0.2.5+dfsg1/src/Settings.cpp --- libopenshot-0.2.2+dfsg1/src/Settings.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Settings.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,61 @@ +/** + * @file + * @brief Source file for global Settings class + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "../include/Settings.h" + +using namespace std; +using namespace openshot; + + +// Global reference to logger +Settings *Settings::m_pInstance = NULL; + +// Create or Get an instance of the logger singleton +Settings *Settings::Instance() +{ + if (!m_pInstance) { + // Create the actual instance of logger only once + m_pInstance = new Settings; + m_pInstance->HARDWARE_DECODER = 0; + m_pInstance->HIGH_QUALITY_SCALING = false; + m_pInstance->MAX_WIDTH = 0; + m_pInstance->MAX_HEIGHT = 0; + m_pInstance->WAIT_FOR_VIDEO_PROCESSING_TASK = false; + m_pInstance->OMP_THREADS = 12; + m_pInstance->FF_THREADS = 8; + m_pInstance->DE_LIMIT_HEIGHT_MAX = 1100; + m_pInstance->DE_LIMIT_WIDTH_MAX = 1950; + m_pInstance->HW_DE_DEVICE_SET = 0; + m_pInstance->HW_EN_DEVICE_SET = 0; + m_pInstance->PLAYBACK_AUDIO_DEVICE_NAME = ""; + } + + return m_pInstance; +} diff -Nru libopenshot-0.2.2+dfsg1/src/TextReader.cpp libopenshot-0.2.5+dfsg1/src/TextReader.cpp --- libopenshot-0.2.2+dfsg1/src/TextReader.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/TextReader.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for TextReader class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -25,6 +28,9 @@ * along with OpenShot Library. If not, see . 
*/ +// Require ImageMagick support +#ifdef USE_IMAGEMAGICK + #include "../include/TextReader.h" using namespace openshot; @@ -32,15 +38,23 @@ /// Default constructor (blank text) TextReader::TextReader() : width(1024), height(768), x_offset(0), y_offset(0), text(""), font("Arial"), size(10.0), text_color("#ffffff"), background_color("#000000"), is_open(false), gravity(GRAVITY_CENTER) { - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) Open(); Close(); } -TextReader::TextReader(int width, int height, int x_offset, int y_offset, GravityType gravity, string text, string font, double size, string text_color, string background_color) +TextReader::TextReader(int width, int height, int x_offset, int y_offset, GravityType gravity, std::string text, std::string font, double size, std::string text_color, std::string background_color) : width(width), height(height), x_offset(x_offset), y_offset(y_offset), text(text), font(font), size(size), text_color(text_color), background_color(background_color), is_open(false), gravity(gravity) { - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) + // Open and Close the reader, to populate its attributes (such as height, width, etc...) + Open(); + Close(); +} + +void TextReader::SetTextBackgroundColor(std::string color) { + text_background_color = color; + + // Open and Close the reader, to populate it's attributes (such as height, width, etc...) plus the text background color Open(); Close(); } @@ -97,6 +111,10 @@ lines.push_back(Magick::DrawablePointSize(size)); lines.push_back(Magick::DrawableText(x_offset, y_offset, text)); + if (!text_background_color.empty()) { + lines.push_back(Magick::DrawableTextUnderColor(Magick::Color(text_background_color))); + } + // Draw image image->draw(lines); @@ -109,7 +127,7 @@ info.height = image->size().height(); info.pixel_ratio.num = 1; info.pixel_ratio.den = 1; - info.duration = 60 * 60 * 24; // 24 hour duration + info.duration = 60 * 60 * 1; // 1 hour duration info.fps.num = 30; info.fps.den = 1; info.video_timebase.num = 1; @@ -169,14 +187,14 @@ } // Generate JSON string of this object -string TextReader::Json() { +std::string TextReader::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value TextReader::JsonValue() { +// Generate Json::Value for this object +Json::Value TextReader::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties @@ -190,6 +208,7 @@ root["size"] = size; root["text_color"] = text_color; root["background_color"] = background_color; + root["text_background_color"] = text_background_color; root["gravity"] = gravity; // return JsonValue @@ -197,30 +216,22 @@ } // Load JSON string into this object -void TextReader::SetJson(string value) { - - // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - +void TextReader::SetJson(const std::string value) { try { + Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid 
data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void TextReader::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void TextReader::SetJsonValue(const Json::Value root) { // Set parent data ReaderBase::SetJsonValue(root); @@ -244,6 +255,8 @@ text_color = root["text_color"].asString(); if (!root["background_color"].isNull()) background_color = root["background_color"].asString(); + if (!root["text_background_color"].isNull()) + text_background_color = root["text_background_color"].asString(); if (!root["gravity"].isNull()) gravity = (GravityType) root["gravity"].asInt(); @@ -254,3 +267,5 @@ Open(); } } + +#endif //USE_IMAGEMAGICK diff -Nru libopenshot-0.2.2+dfsg1/src/Timeline.cpp libopenshot-0.2.5+dfsg1/src/Timeline.cpp --- libopenshot-0.2.2+dfsg1/src/Timeline.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/Timeline.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for Timeline class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -31,7 +34,7 @@ // Default Constructor for the timeline (which sets the canvas width and height) Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) : - is_open(false), auto_map_clips(true) + is_open(false), auto_map_clips(true), managed_cache(true) { // Create CrashHandler and Attach (incase of errors) CrashHandler::Instance(); @@ -58,15 +61,42 @@ info.has_audio = true; info.has_video = true; info.video_length = info.fps.ToFloat() * info.duration; + info.display_ratio = openshot::Fraction(width, height); + info.display_ratio.Reduce(); + info.pixel_ratio = openshot::Fraction(1, 1); // Init max image size - SetMaxSize(info.width, info.height); + SetMaxSize(info.width, info.height); // Init cache final_cache = new CacheMemory(); final_cache->SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); } +Timeline::~Timeline() { + if (is_open) + // Auto Close if not already + Close(); + + // Free all allocated frame mappers + std::set::iterator it; + for (it = allocated_frame_mappers.begin(); it != allocated_frame_mappers.end(); ) { + // Dereference and clean up FrameMapper object + FrameMapper *mapper = (*it); + mapper->Reader(NULL); + mapper->Close(); + delete mapper; + // Remove reference and proceed to next element + it = allocated_frame_mappers.erase(it); + } + + // Destroy previous cache (if managed by timeline) + if (managed_cache && final_cache) { + delete final_cache; + final_cache = NULL; + } +} + // Add an openshot::Clip to the timeline void Timeline::AddClip(Clip* clip) { @@ -120,7 +150,9 @@ } else { // Create a new FrameMapper to wrap the current reader - clip_reader = (ReaderBase*) new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout); + FrameMapper* mapper = new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout); + allocated_frame_mappers.insert(mapper); + clip_reader = (ReaderBase*) mapper; } // Update the mapping @@ -138,12 +170,8 @@ ClearAllCache(); // Loop 
through all clips - list::iterator clip_itr; - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) + for (auto clip : clips) { - // Get clip object from the iterator - Clip *clip = (*clip_itr); - // Apply framemapper (or update existing framemapper) apply_mapper_to_clip(clip); } @@ -163,15 +191,11 @@ std::shared_ptr Timeline::apply_effects(std::shared_ptr frame, int64_t timeline_frame_number, int layer) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects", "frame->number", frame->number, "timeline_frame_number", timeline_frame_number, "layer", layer, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects", "frame->number", frame->number, "timeline_frame_number", timeline_frame_number, "layer", layer); // Find Effects at this position and layer - list::iterator effect_itr; - for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr) + for (auto effect : effects) { - // Get effect object from the iterator - EffectBase *effect = (*effect_itr); - // Does clip intersect the current requested time long effect_start_position = round(effect->Position() * info.fps.ToDouble()) + 1; long effect_end_position = round((effect->Position() + (effect->Duration())) * info.fps.ToDouble()) + 1; @@ -179,7 +203,7 @@ bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Does effect intersect)", "effect->Position()", effect->Position(), "does_effect_intersect", does_effect_intersect, "timeline_frame_number", timeline_frame_number, "layer", layer, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Does effect intersect)", "effect->Position()", effect->Position(), "does_effect_intersect", does_effect_intersect, "timeline_frame_number", timeline_frame_number, "layer", layer); // Clip is visible if (does_effect_intersect) @@ -189,7 +213,7 @@ long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame; // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Process Effect)", "effect_frame_number", effect_frame_number, "does_effect_intersect", does_effect_intersect, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Process Effect)", "effect_frame_number", effect_frame_number, "does_effect_intersect", does_effect_intersect); // Apply the effect to this frame frame = effect->GetFrame(frame, effect_frame_number); @@ -211,10 +235,7 @@ try { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - - // Set max image size (used for performance optimization) - clip->SetMaxSize(info.width, info.height); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame); // Attempt to get a frame (but this could fail if a reader has just been closed) #pragma omp critical (T_GetOtCreateFrame) @@ -232,10 +253,10 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (create 
blank)", "number", number, "samples_in_frame", samples_in_frame); // Create blank frame - new_frame = std::make_shared(number, max_width, max_height, "#000000", samples_in_frame, info.channels); + new_frame = std::make_shared(number, Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, "#000000", samples_in_frame, info.channels); #pragma omp critical (T_GetOtCreateFrame) { new_frame->SampleRate(info.sample_rate); @@ -257,13 +278,13 @@ return; // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer", "new_frame->number", new_frame->number, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer", "new_frame->number", new_frame->number, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number); /* REPLACE IMAGE WITH WAVEFORM IMAGE (IF NEEDED) */ if (source_clip->Waveform()) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Generate Waveform Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Generate Waveform Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number); // Get the color of the waveform int red = source_clip->wave_color.red.GetInt(clip_frame_number); @@ -274,15 +295,16 @@ // Generate Waveform Dynamically (the size of the timeline) std::shared_ptr source_image; #pragma omp critical (T_addLayer) - source_image = source_frame->GetWaveform(max_width, max_height, red, green, blue, alpha); + source_image = source_frame->GetWaveform(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, red, green, blue, alpha); source_frame->AddImage(std::shared_ptr(source_image)); } /* Apply effects to the source frame (if any). If multiple clips are overlapping, only process the * effects on the top clip. 
*/ - if (is_top_clip && source_frame) + if (is_top_clip && source_frame) { #pragma omp critical (T_addLayer) source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->Layer()); + } // Declare an image to hold the source frame's image std::shared_ptr source_image; @@ -290,7 +312,7 @@ /* COPY AUDIO - with correct volume */ if (source_clip->Reader()->info.has_audio) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Copy Audio)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Copy Audio)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number); if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0) for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++) @@ -346,17 +368,18 @@ } else // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (No Audio Copied - Wrong # of Channels)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (No Audio Copied - Wrong # of Channels)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number); } - // Skip out if only an audio frame - if (!source_clip->Waveform() && !source_clip->Reader()->info.has_video) + // Skip out if video was disabled or only an audio frame (no visualisation in use) + if (source_clip->has_video.GetInt(clip_frame_number) == 0 || + (!source_clip->Waveform() && !source_clip->Reader()->info.has_video)) // Skip the rest of the image processing for performance reasons return; // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Get Source Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Get Source Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number); // Get actual frame image data source_image = source_frame->GetImage(); @@ -380,44 +403,97 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Set Alpha & Opacity)", "alpha", alpha, "source_frame->number", source_frame->number, "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Set Alpha & Opacity)", "alpha", alpha, "source_frame->number", source_frame->number, "clip_frame_number", 
clip_frame_number); } /* RESIZE SOURCE IMAGE - based on scale type */ QSize source_size = source_image->size(); switch (source_clip->scale) { - case (SCALE_FIT): - // keep aspect ratio - source_size.scale(max_width, max_height, Qt::KeepAspectRatio); + case (SCALE_FIT): { + // keep aspect ratio + source_size.scale(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, Qt::KeepAspectRatio); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; - - case (SCALE_STRETCH): - // ignore aspect ratio - source_size.scale(max_width, max_height, Qt::IgnoreAspectRatio); + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height()); + break; + } + case (SCALE_STRETCH): { + // ignore aspect ratio + source_size.scale(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, Qt::IgnoreAspectRatio); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height()); + break; + } + case (SCALE_CROP): { + QSize width_size(Settings::Instance()->MAX_WIDTH, round(Settings::Instance()->MAX_WIDTH / (float(source_size.width()) / float(source_size.height())))); + QSize height_size(round(Settings::Instance()->MAX_HEIGHT / (float(source_size.height()) / float(source_size.width()))), Settings::Instance()->MAX_HEIGHT); + + // respect aspect ratio + if (width_size.width() >= Settings::Instance()->MAX_WIDTH && width_size.height() >= Settings::Instance()->MAX_HEIGHT) + source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio); + else + source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio); - case (SCALE_CROP): - QSize width_size(max_width, round(max_width / (float(source_size.width()) / float(source_size.height())))); - QSize height_size(round(max_height / (float(source_size.height()) / float(source_size.width()))), max_height); - - // respect aspect ratio - if (width_size.width() >= max_width && width_size.height() >= max_height) - source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio); - else - source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio); + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height()); + break; + } + case (SCALE_NONE): { + // Calculate ratio of source size to project size + // Even with no scaling, previews need to be adjusted correctly + // (otherwise NONE scaling draws the frame image outside of the preview) + float source_width_ratio = source_size.width() / float(info.width); + float source_height_ratio = source_size.height() / float(info.height); + source_size.scale(Settings::Instance()->MAX_WIDTH * source_width_ratio, 
Settings::Instance()->MAX_HEIGHT * source_height_ratio, Qt::KeepAspectRatio); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_NONE)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height()); + break; + } } + float crop_x = source_clip->crop_x.GetValue(clip_frame_number); + float crop_y = source_clip->crop_y.GetValue(clip_frame_number); + float crop_w = source_clip->crop_width.GetValue(clip_frame_number); + float crop_h = source_clip->crop_height.GetValue(clip_frame_number); + switch(source_clip->crop_gravity) + { + case (GRAVITY_TOP_LEFT): + // This is only here to prevent unused-enum warnings + break; + case (GRAVITY_TOP): + crop_x += 0.5; + break; + case (GRAVITY_TOP_RIGHT): + crop_x += 1.0; + break; + case (GRAVITY_LEFT): + crop_y += 0.5; + break; + case (GRAVITY_CENTER): + crop_x += 0.5; + crop_y += 0.5; + break; + case (GRAVITY_RIGHT): + crop_x += 1.0; + crop_y += 0.5; + break; + case (GRAVITY_BOTTOM_LEFT): + crop_y += 1.0; + break; + case (GRAVITY_BOTTOM): + crop_x += 0.5; + crop_y += 1.0; + break; + case (GRAVITY_BOTTOM_RIGHT): + crop_x += 1.0; + crop_y += 1.0; + break; + } + + /* GRAVITY LOCATION - Initialize X & Y to the correct values (before applying location curves) */ float x = 0.0; // left float y = 0.0; // top @@ -430,33 +506,36 @@ switch (source_clip->gravity) { + case (GRAVITY_TOP_LEFT): + // This is only here to prevent unused-enum warnings + break; case (GRAVITY_TOP): - x = (max_width - scaled_source_width) / 2.0; // center + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center break; case (GRAVITY_TOP_RIGHT): - x = max_width - scaled_source_width; // right + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right break; case (GRAVITY_LEFT): - y = (max_height - scaled_source_height) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_CENTER): - x = (max_width - scaled_source_width) / 2.0; // center - y = (max_height - scaled_source_height) / 2.0; // center + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_RIGHT): - x = max_width - scaled_source_width; // right - y = (max_height - scaled_source_height) / 2.0; // center + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_BOTTOM_LEFT): - y = (max_height - scaled_source_height); // bottom + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; case (GRAVITY_BOTTOM): - x = (max_width - scaled_source_width) / 2.0; // center - y = (max_height - scaled_source_height); // bottom + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; case (GRAVITY_BOTTOM_RIGHT): - x = max_width - scaled_source_width; // right - y = (max_height - scaled_source_height); // bottom + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right + y = (Settings::Instance()->MAX_HEIGHT - 
scaled_source_height); // bottom break; } @@ -465,8 +544,8 @@ /* LOCATION, ROTATION, AND SCALE */ float r = source_clip->rotation.GetValue(clip_frame_number); // rotate in degrees - x += (max_width * source_clip->location_x.GetValue(clip_frame_number)); // move in percentage of final width - y += (max_height * source_clip->location_y.GetValue(clip_frame_number)); // move in percentage of final height + x += (Settings::Instance()->MAX_WIDTH * source_clip->location_x.GetValue(clip_frame_number)); // move in percentage of final width + y += (Settings::Instance()->MAX_HEIGHT * source_clip->location_y.GetValue(clip_frame_number)); // move in percentage of final height float shear_x = source_clip->shear_x.GetValue(clip_frame_number); float shear_y = source_clip->shear_y.GetValue(clip_frame_number); @@ -508,7 +587,7 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Prepare)", "source_frame->number", source_frame->number, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Prepare)", "source_frame->number", source_frame->number, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed); /* COMPOSITE SOURCE IMAGE (LAYER) ONTO FINAL IMAGE */ std::shared_ptr new_image; @@ -525,13 +604,17 @@ // Composite a new layer onto the image painter.setCompositionMode(QPainter::CompositionMode_SourceOver); - painter.drawImage(0, 0, *source_image); + painter.drawImage(0, 0, *source_image, crop_x * source_image->width(), crop_y * source_image->height(), crop_w * source_image->width(), crop_h * source_image->height()); // Draw frame #'s on top of image (if needed) if (source_clip->display != FRAME_DISPLAY_NONE) { - stringstream frame_number_str; + std::stringstream frame_number_str; switch (source_clip->display) { + case (FRAME_DISPLAY_NONE): + // This is only here to prevent unused-enum warnings + break; + case (FRAME_DISPLAY_CLIP): frame_number_str << clip_frame_number; break; @@ -553,13 +636,13 @@ painter.end(); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Completed)", "source_frame->number", source_frame->number, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Completed)", "source_frame->number", source_frame->number, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed); } // Update the list of 'opened' clips void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect) { - ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (before)", "does_clip_intersect", does_clip_intersect, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (before)", "does_clip_intersect", does_clip_intersect, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size()); // is clip already in list? 
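[Editor's note, not part of the patch] The gravity switch in the hunk above reduces to anchoring the scaled source frame against the preview canvas (Settings::Instance()->MAX_WIDTH / MAX_HEIGHT) on each axis; location_x/location_y are then added as percentages of that canvas. A rough standalone sketch of just the anchoring arithmetic, with a hypothetical Gravity enum standing in for openshot::GravityType:

    #include <utility>

    // Hypothetical stand-in for openshot::GravityType, for illustration only.
    enum class Gravity { TopLeft, Top, TopRight, Left, Center, Right,
                         BottomLeft, Bottom, BottomRight };

    // Top-left drawing position of a scaled source frame on the preview canvas,
    // following the same anchoring rules as the switch in Timeline::add_layer().
    std::pair<float, float> gravity_offset(Gravity gravity,
                                           float canvas_w, float canvas_h,
                                           float source_w, float source_h)
    {
        float x = 0.0f;                                 // GRAVITY_TOP_LEFT default
        float y = 0.0f;
        const float center_x = (canvas_w - source_w) / 2.0f;
        const float center_y = (canvas_h - source_h) / 2.0f;
        const float right_x  = canvas_w - source_w;
        const float bottom_y = canvas_h - source_h;

        switch (gravity) {
            case Gravity::TopLeft:                                   break;
            case Gravity::Top:         x = center_x;                 break;
            case Gravity::TopRight:    x = right_x;                  break;
            case Gravity::Left:                       y = center_y;  break;
            case Gravity::Center:      x = center_x;  y = center_y;  break;
            case Gravity::Right:       x = right_x;   y = center_y;  break;
            case Gravity::BottomLeft:                 y = bottom_y;  break;
            case Gravity::Bottom:      x = center_x;  y = bottom_y;  break;
            case Gravity::BottomRight: x = right_x;   y = bottom_y;  break;
        }
        return {x, y};
    }
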
bool clip_found = open_clips.count(clip); @@ -587,14 +670,14 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (after)", "does_clip_intersect", does_clip_intersect, "clip_found", clip_found, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (after)", "does_clip_intersect", does_clip_intersect, "clip_found", clip_found, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size()); } // Sort clips by position on the timeline void Timeline::sort_clips() { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::SortClips", "clips.size()", clips.size(), "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::SortClips", "clips.size()", clips.size()); // sort clips clips.sort(CompareClips()); @@ -610,15 +693,11 @@ // Close the reader (and any resources it was consuming) void Timeline::Close() { - ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close"); // Close all open clips - list::iterator clip_itr; - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) + for (auto clip : clips) { - // Get clip object from the iterator - Clip *clip = (*clip_itr); - // Open or Close this clip, based on if it's intersecting or not update_open_clips(clip, false); } @@ -655,7 +734,7 @@ frame = final_cache->GetFrame(requested_frame); if (frame) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame); // Return cached frame return frame; @@ -667,14 +746,14 @@ // Check for open reader (or throw exception) if (!is_open) - throw ReaderClosed("The Timeline is closed. Call Open() before calling this method.", ""); + throw ReaderClosed("The Timeline is closed. 
Call Open() before calling this method."); // Check cache again (due to locking) #pragma omp critical (T_GetFrame) frame = final_cache->GetFrame(requested_frame); if (frame) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame); // Return cached frame return frame; @@ -685,7 +764,7 @@ // Get a list of clips that intersect with the requested section of timeline // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing' - vector nearby_clips; + std::vector nearby_clips; #pragma omp critical (T_GetFrame) nearby_clips = find_intersecting_clips(requested_frame, minimum_frames, true); @@ -694,17 +773,15 @@ omp_set_nested(true); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS); // GENERATE CACHE FOR CLIPS (IN FRAME # SEQUENCE) // Determine all clip frames, and request them in order (to keep resampled audio in sequence) for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++) { // Loop through clips - for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++) + for (auto clip : nearby_clips) { - // Get clip object from the iterator - Clip *clip = nearby_clips[clip_index]; long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1; long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1; @@ -727,13 +804,13 @@ for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++) { // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (processing frame)", "frame_number", frame_number, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (processing frame)", "frame_number", frame_number, "omp_get_thread_num()", omp_get_thread_num()); // Init some basic properties about this frame int samples_in_frame = Frame::GetSamplesPerFrame(frame_number, info.fps, info.sample_rate, info.channels); // Create blank frame (which will become the requested frame) - std::shared_ptr new_frame(std::make_shared(frame_number, max_width, max_height, "#000000", samples_in_frame, info.channels)); + std::shared_ptr new_frame(std::make_shared(frame_number, Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, "#000000", samples_in_frame, info.channels)); #pragma omp critical (T_GetFrame) { new_frame->AddAudioSilence(samples_in_frame); @@ -742,28 +819,26 @@ } // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height); // Add Background Color to 1st layer (if 
animated or not black) - if ((color.red.Points.size() > 1 || color.green.Points.size() > 1 || color.blue.Points.size() > 1) || + if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) || (color.red.GetValue(frame_number) != 0.0 || color.green.GetValue(frame_number) != 0.0 || color.blue.GetValue(frame_number) != 0.0)) - new_frame->AddColor(max_width, max_height, color.GetColorHex(frame_number)); + new_frame->AddColor(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, color.GetColorHex(frame_number)); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size(), "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size()); // Find Clips near this time - for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++) + for (auto clip : nearby_clips) { - // Get clip object from the iterator - Clip *clip = nearby_clips[clip_index]; long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1; long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1; bool does_clip_intersect = (clip_start_position <= frame_number && clip_end_position >= frame_number); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "frame_number", frame_number, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "frame_number", frame_number, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect); // Clip is visible if (does_clip_intersect) @@ -771,9 +846,8 @@ // Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping) bool is_top_clip = true; float max_volume = 0.0; - for (int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++) + for (auto nearby_clip : nearby_clips) { - Clip *nearby_clip = nearby_clips[top_clip_index]; long nearby_clip_start_position = round(nearby_clip->Position() * info.fps.ToDouble()) + 1; long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1; long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1; @@ -799,19 +873,19 @@ long clip_frame_number = frame_number - clip_start_position + clip_start_frame; // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number); // Add clip's frame as layer add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip, max_volume); } else // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (clip does not intersect)", "frame_number", frame_number, 
"does_clip_intersect", does_clip_intersect, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (clip does not intersect)", "frame_number", frame_number, "does_clip_intersect", does_clip_intersect); } // end clip loop // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height); // Set frame # on mapped frame #pragma omp ordered @@ -826,7 +900,7 @@ } // end parallel // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (end parallel region)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (end parallel region)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num()); // Return frame (or blank frame) return final_cache->GetFrame(requested_frame); @@ -835,10 +909,10 @@ // Find intersecting clips (or non intersecting clips) -vector Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include) +std::vector Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include) { // Find matching clips - vector matching_clips; + std::vector matching_clips; // Calculate time of frame float min_requested_frame = requested_frame; @@ -848,12 +922,8 @@ sort_clips(); // Find Clips at this time - list::iterator clip_itr; - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) + for (auto clip : clips) { - // Get clip object from the iterator - Clip *clip = (*clip_itr); - // Does clip intersect the current requested time long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1; long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1; @@ -863,7 +933,7 @@ (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame); // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::find_intersecting_clips (Is clip near or intersecting)", "requested_frame", requested_frame, "min_requested_frame", min_requested_frame, "max_requested_frame", max_requested_frame, "clip->Position()", clip->Position(), "does_clip_intersect", does_clip_intersect, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::find_intersecting_clips (Is clip near or intersecting)", "requested_frame", requested_frame, "min_requested_frame", min_requested_frame, "max_requested_frame", max_requested_frame, "clip->Position()", clip->Position(), "does_clip_intersect", does_clip_intersect); // Open (or schedule for closing) this clip, based on if it's intersecting or not #pragma omp critical (reader_lock) @@ -884,21 +954,28 @@ return matching_clips; } -// Get the cache object used by this reader +// Set the cache object used by this reader void Timeline::SetCache(CacheBase* new_cache) { + // Destroy previous cache (if managed by timeline) + if (managed_cache && final_cache) { + delete final_cache; + final_cache = NULL; + managed_cache = false; + } + // Set new cache final_cache = new_cache; } // Generate JSON string of this object -string Timeline::Json() { +std::string Timeline::Json() const { // Return formatted string return 
JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value Timeline::JsonValue() { +// Generate Json::Value for this object +Json::Value Timeline::JsonValue() const { // Create root json object Json::Value root = ReaderBase::JsonValue(); // get parent properties @@ -912,11 +989,8 @@ root["clips"] = Json::Value(Json::arrayValue); // Find Clips at this time - list::iterator clip_itr; - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) + for (const auto existing_clip : clips) { - // Get clip object from the iterator - Clip *existing_clip = (*clip_itr); root["clips"].append(existing_clip->JsonValue()); } @@ -924,11 +998,8 @@ root["effects"] = Json::Value(Json::arrayValue); // loop through effects - list::iterator effect_itr; - for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr) + for (const auto existing_effect: effects) { - // Get clip object from the iterator - EffectBase *existing_effect = (*effect_itr); root["effects"].append(existing_effect->JsonValue()); } @@ -937,33 +1008,27 @@ } // Load JSON string into this object -void Timeline::SetJson(string value) { +void Timeline::SetJson(const std::string value) { // Get lock (prevent getting frames while this happens) const GenericScopedLock lock(getFrameCriticalSection); // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void Timeline::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void Timeline::SetJsonValue(const Json::Value root) { // Close timeline before we do anything (this also removes all open and closing clips) bool was_open = is_open; @@ -977,10 +1042,7 @@ clips.clear(); // loop through clips - for (int x = 0; x < root["clips"].size(); x++) { - // Get each clip - Json::Value existing_clip = root["clips"][x]; - + for (const Json::Value existing_clip : root["clips"]) { // Create Clip Clip *c = new Clip(); @@ -997,16 +1059,13 @@ effects.clear(); // loop through effects - for (int x = 0; x < root["effects"].size(); x++) { - // Get each effect - Json::Value existing_effect = root["effects"][x]; - + for (const Json::Value existing_effect :root["effects"]) { // Create Effect EffectBase *e = NULL; if (!existing_effect["type"].isNull()) { // Create instance of effect - if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { + if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) { // Load Json into Effect e->SetJsonValue(existing_effect); @@ -1030,33 +1089,25 @@ } // Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete) -void Timeline::ApplyJsonDiff(string value) { +void Timeline::ApplyJsonDiff(std::string value) { // Get lock (prevent getting frames while this happens) const GenericScopedLock lock(getFrameCriticalSection); // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success || 
!root.isArray()) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid).", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Process the JSON change array, loop through each item - for (int x = 0; x < root.size(); x++) { - // Get each change - Json::Value change = root[x]; - string root_key = change["key"][(uint)0].asString(); + for (const Json::Value change : root) { + std::string change_key = change["key"][(uint)0].asString(); // Process each type of change - if (root_key == "clips") + if (change_key == "clips") // Apply to CLIPS apply_json_to_clips(change); - else if (root_key == "effects") + else if (change_key == "effects") // Apply to EFFECTS apply_json_to_effects(change); @@ -1066,10 +1117,10 @@ } } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } @@ -1077,15 +1128,13 @@ void Timeline::apply_json_to_clips(Json::Value change) { // Get key and type of change - string change_type = change["type"].asString(); - string clip_id = ""; + std::string change_type = change["type"].asString(); + std::string clip_id = ""; Clip *existing_clip = NULL; // Find id of clip (if any) - for (int x = 0; x < change["key"].size(); x++) { + for (auto key_part : change["key"]) { // Get each change - Json::Value key_part = change["key"][x]; - if (key_part.isObject()) { // Check for id if (!key_part["id"].isNull()) { @@ -1093,11 +1142,8 @@ clip_id = key_part["id"].asString(); // Find matching clip in timeline (if any) - list::iterator clip_itr; - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) + for (auto c : clips) { - // Get clip object from the iterator - Clip *c = (*clip_itr); if (c->Id() == clip_id) { existing_clip = c; break; // clip found, exit loop @@ -1120,15 +1166,12 @@ if (!key_part["id"].isNull()) { // Set the id - string effect_id = key_part["id"].asString(); + std::string effect_id = key_part["id"].asString(); // Find matching effect in timeline (if any) - list effect_list = existing_clip->Effects(); - list::iterator effect_itr; - for (effect_itr=effect_list.begin(); effect_itr != effect_list.end(); ++effect_itr) + std::list effect_list = existing_clip->Effects(); + for (auto e : effect_list) { - // Get effect object from the iterator - EffectBase *e = (*effect_itr); if (e->Id() == effect_id) { // Apply the change to the effect directly apply_json_to_effects(change, e); @@ -1182,17 +1225,6 @@ // Apply framemapper (or update existing framemapper) apply_mapper_to_clip(existing_clip); - - // Clear any cached image sizes (since size might have changed) - existing_clip->SetMaxSize(0, 0); // force clearing of cached image size - if (existing_clip->Reader()) { - existing_clip->Reader()->SetMaxSize(0, 0); - if (existing_clip->Reader()->Name() == "FrameMapper") { - FrameMapper *nested_reader = (FrameMapper *) existing_clip->Reader(); - if (nested_reader->Reader()) - nested_reader->Reader()->SetMaxSize(0, 0); - } - } } } else if (change_type == "delete") { @@ -1217,27 +1249,22 @@ void Timeline::apply_json_to_effects(Json::Value change) { // Get key and type of change - string change_type = change["type"].asString(); + std::string change_type = change["type"].asString(); EffectBase *existing_effect = NULL; // Find id of an effect (if any) - for (int x = 0; x < change["key"].size(); x++) { - // Get each change - Json::Value key_part 
= change["key"][x]; + for (auto key_part : change["key"]) { if (key_part.isObject()) { // Check for id if (!key_part["id"].isNull()) { // Set the id - string effect_id = key_part["id"].asString(); + std::string effect_id = key_part["id"].asString(); // Find matching effect in timeline (if any) - list::iterator effect_itr; - for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr) + for (auto e : effects) { - // Get effect object from the iterator - EffectBase *e = (*effect_itr); if (e->Id() == effect_id) { existing_effect = e; break; // effect found, exit loop @@ -1258,7 +1285,7 @@ void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) { // Get key and type of change - string change_type = change["type"].asString(); + std::string change_type = change["type"].asString(); // Calculate start and end frames that this impacts, and remove those frames from the cache if (!change["value"].isArray() && !change["value"]["position"].isNull()) { @@ -1271,13 +1298,13 @@ if (change_type == "insert") { // Determine type of effect - string effect_type = change["value"]["type"].asString(); + std::string effect_type = change["value"]["type"].asString(); // Create Effect EffectBase *e = NULL; // Init the matching effect object - if (e = EffectInfo().CreateEffect(effect_type)) { + if ( (e = EffectInfo().CreateEffect(effect_type)) ) { // Load Json into Effect e->SetJsonValue(change["value"]); @@ -1321,9 +1348,9 @@ void Timeline::apply_json_to_timeline(Json::Value change) { // Get key and type of change - string change_type = change["type"].asString(); - string root_key = change["key"][(uint)0].asString(); - string sub_key = ""; + std::string change_type = change["type"].asString(); + std::string root_key = change["key"][(uint)0].asString(); + std::string sub_key = ""; if (change["key"].size() >= 2) sub_key = change["key"][(uint)1].asString(); @@ -1371,6 +1398,33 @@ else if (root_key == "fps" && sub_key == "den") // Set fps.den info.fps.den = change["value"].asInt(); + else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) { + // Set display_ratio fraction + if (!change["value"]["num"].isNull()) + info.display_ratio.num = change["value"]["num"].asInt(); + if (!change["value"]["den"].isNull()) + info.display_ratio.den = change["value"]["den"].asInt(); + } + else if (root_key == "display_ratio" && sub_key == "num") + // Set display_ratio.num + info.display_ratio.num = change["value"].asInt(); + else if (root_key == "display_ratio" && sub_key == "den") + // Set display_ratio.den + info.display_ratio.den = change["value"].asInt(); + else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) { + // Set pixel_ratio fraction + if (!change["value"]["num"].isNull()) + info.pixel_ratio.num = change["value"]["num"].asInt(); + if (!change["value"]["den"].isNull()) + info.pixel_ratio.den = change["value"]["den"].asInt(); + } + else if (root_key == "pixel_ratio" && sub_key == "num") + // Set pixel_ratio.num + info.pixel_ratio.num = change["value"].asInt(); + else if (root_key == "pixel_ratio" && sub_key == "den") + // Set pixel_ratio.den + info.pixel_ratio.den = change["value"].asInt(); + else if (root_key == "sample_rate") // Set sample rate info.sample_rate = change["value"].asInt(); @@ -1380,9 +1434,7 @@ else if (root_key == "channel_layout") // Set channel layout info.channel_layout = (ChannelLayout) change["value"].asInt(); - else - // Error parsing JSON (or missing keys) throw InvalidJSONKey("JSON change key is 
invalid", change.toStyledString()); @@ -1421,12 +1473,8 @@ final_cache->Clear(); // Loop through all clips - list::iterator clip_itr; - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) + for (auto clip : clips) { - // Get clip object from the iterator - Clip *clip = (*clip_itr); - // Clear cache on clip clip->Reader()->GetCache()->Clear(); @@ -1439,3 +1487,18 @@ } } + +// Set Max Image Size (used for performance optimization). Convenience function for setting +// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT. +void Timeline::SetMaxSize(int width, int height) { + // Maintain aspect ratio regardless of what size is passed in + QSize display_ratio_size = QSize(info.display_ratio.num * info.pixel_ratio.ToFloat(), info.display_ratio.den * info.pixel_ratio.ToFloat()); + QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height)); + + // Scale QSize up to proposed size + display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio); + + // Set max size + Settings::Instance()->MAX_WIDTH = display_ratio_size.width(); + Settings::Instance()->MAX_HEIGHT = display_ratio_size.height(); +} diff -Nru libopenshot-0.2.2+dfsg1/src/WriterBase.cpp libopenshot-0.2.5+dfsg1/src/WriterBase.cpp --- libopenshot-0.2.2+dfsg1/src/WriterBase.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/WriterBase.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for WriterBase class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -97,53 +100,53 @@ // Display file information void WriterBase::DisplayInfo() { - cout << fixed << setprecision(2) << boolalpha; - cout << "----------------------------" << endl; - cout << "----- File Information -----" << endl; - cout << "----------------------------" << endl; - cout << "--> Has Video: " << info.has_video << endl; - cout << "--> Has Audio: " << info.has_audio << endl; - cout << "--> Has Single Image: " << info.has_single_image << endl; - cout << "--> Duration: " << info.duration << " Seconds" << endl; - cout << "--> File Size: " << double(info.file_size) / 1024 / 1024 << " MB" << endl; - cout << "----------------------------" << endl; - cout << "----- Video Attributes -----" << endl; - cout << "----------------------------" << endl; - cout << "--> Width: " << info.width << endl; - cout << "--> Height: " << info.height << endl; - cout << "--> Pixel Format: " << info.pixel_format << endl; - cout << "--> Frames Per Second: " << info.fps.ToDouble() << " (" << info.fps.num << "/" << info.fps.den << ")" << endl; - cout << "--> Video Bit Rate: " << info.video_bit_rate/1000 << " kb/s" << endl; - cout << "--> Pixel Ratio: " << info.pixel_ratio.ToDouble() << " (" << info.pixel_ratio.num << "/" << info.pixel_ratio.den << ")" << endl; - cout << "--> Display Aspect Ratio: " << info.display_ratio.ToDouble() << " (" << info.display_ratio.num << "/" << info.display_ratio.den << ")" << endl; - cout << "--> Video Codec: " << info.vcodec << endl; - cout << "--> Video Length: " << info.video_length << " Frames" << endl; - cout << "--> Video Stream Index: " << info.video_stream_index << endl; - cout << "--> Video Timebase: " << info.video_timebase.ToDouble() << " (" << info.video_timebase.num 
<< "/" << info.video_timebase.den << ")" << endl; - cout << "--> Interlaced: " << info.interlaced_frame << endl; - cout << "--> Interlaced: Top Field First: " << info.top_field_first << endl; - cout << "----------------------------" << endl; - cout << "----- Audio Attributes -----" << endl; - cout << "----------------------------" << endl; - cout << "--> Audio Codec: " << info.acodec << endl; - cout << "--> Audio Bit Rate: " << info.audio_bit_rate/1000 << " kb/s" << endl; - cout << "--> Sample Rate: " << info.sample_rate << " Hz" << endl; - cout << "--> # of Channels: " << info.channels << endl; - cout << "--> Channel Layout: " << info.channel_layout << endl; - cout << "--> Audio Stream Index: " << info.audio_stream_index << endl; - cout << "--> Audio Timebase: " << info.audio_timebase.ToDouble() << " (" << info.audio_timebase.num << "/" << info.audio_timebase.den << ")" << endl; - cout << "----------------------------" << endl; + std::cout << std::fixed << std::setprecision(2) << std::boolalpha; + std::cout << "----------------------------" << std::endl; + std::cout << "----- File Information -----" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--> Has Video: " << info.has_video << std::endl; + std::cout << "--> Has Audio: " << info.has_audio << std::endl; + std::cout << "--> Has Single Image: " << info.has_single_image << std::endl; + std::cout << "--> Duration: " << info.duration << " Seconds" << std::endl; + std::cout << "--> File Size: " << double(info.file_size) / 1024 / 1024 << " MB" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "----- Video Attributes -----" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--> Width: " << info.width << std::endl; + std::cout << "--> Height: " << info.height << std::endl; + std::cout << "--> Pixel Format: " << info.pixel_format << std::endl; + std::cout << "--> Frames Per Second: " << info.fps.ToDouble() << " (" << info.fps.num << "/" << info.fps.den << ")" << std::endl; + std::cout << "--> Video Bit Rate: " << info.video_bit_rate/1000 << " kb/s" << std::endl; + std::cout << "--> Pixel Ratio: " << info.pixel_ratio.ToDouble() << " (" << info.pixel_ratio.num << "/" << info.pixel_ratio.den << ")" << std::endl; + std::cout << "--> Display Aspect Ratio: " << info.display_ratio.ToDouble() << " (" << info.display_ratio.num << "/" << info.display_ratio.den << ")" << std::endl; + std::cout << "--> Video Codec: " << info.vcodec << std::endl; + std::cout << "--> Video Length: " << info.video_length << " Frames" << std::endl; + std::cout << "--> Video Stream Index: " << info.video_stream_index << std::endl; + std::cout << "--> Video Timebase: " << info.video_timebase.ToDouble() << " (" << info.video_timebase.num << "/" << info.video_timebase.den << ")" << std::endl; + std::cout << "--> Interlaced: " << info.interlaced_frame << std::endl; + std::cout << "--> Interlaced: Top Field First: " << info.top_field_first << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "----- Audio Attributes -----" << std::endl; + std::cout << "----------------------------" << std::endl; + std::cout << "--> Audio Codec: " << info.acodec << std::endl; + std::cout << "--> Audio Bit Rate: " << info.audio_bit_rate/1000 << " kb/s" << std::endl; + std::cout << "--> Sample Rate: " << info.sample_rate << " Hz" << std::endl; + std::cout << "--> # of Channels: " << info.channels << std::endl; + std::cout << "--> Channel Layout: " 
<< info.channel_layout << std::endl; + std::cout << "--> Audio Stream Index: " << info.audio_stream_index << std::endl; + std::cout << "--> Audio Timebase: " << info.audio_timebase.ToDouble() << " (" << info.audio_timebase.num << "/" << info.audio_timebase.den << ")" << std::endl; + std::cout << "----------------------------" << std::endl; } // Generate JSON string of this object -string WriterBase::Json() { +std::string WriterBase::Json() const { // Return formatted string return JsonValue().toStyledString(); } -// Generate Json::JsonValue for this object -Json::Value WriterBase::JsonValue() { +// Generate Json::Value for this object +Json::Value WriterBase::JsonValue() const { // Create root json object Json::Value root; @@ -151,7 +154,7 @@ root["has_audio"] = info.has_audio; root["has_single_image"] = info.has_single_image; root["duration"] = info.duration; - stringstream filesize_stream; + std::stringstream filesize_stream; filesize_stream << info.file_size; root["file_size"] = filesize_stream.str(); root["height"] = info.height; @@ -168,7 +171,7 @@ root["display_ratio"]["num"] = info.display_ratio.num; root["display_ratio"]["den"] = info.display_ratio.den; root["vcodec"] = info.vcodec; - stringstream video_length_stream; + std::stringstream video_length_stream; video_length_stream << info.video_length; root["video_length"] = video_length_stream.str(); root["video_stream_index"] = info.video_stream_index; @@ -192,30 +195,24 @@ } // Load JSON string into this object -void WriterBase::SetJson(string value) { +void WriterBase::SetJson(const std::string value) { // Parse JSON string into JSON objects - Json::Value root; - Json::Reader reader; - bool success = reader.parse( value, root ); - if (!success) - // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try { + const Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } } -// Load Json::JsonValue into this object -void WriterBase::SetJsonValue(Json::Value root) { +// Load Json::Value into this object +void WriterBase::SetJsonValue(const Json::Value root) { // Set data from Json (if key is found) if (!root["has_video"].isNull()) diff -Nru libopenshot-0.2.2+dfsg1/src/ZmqLogger.cpp libopenshot-0.2.5+dfsg1/src/ZmqLogger.cpp --- libopenshot-0.2.2+dfsg1/src/ZmqLogger.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/src/ZmqLogger.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Source file for ZeroMQ-based Logger class * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
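Note: Timeline::SetJson and WriterBase::SetJson above now delegate parsing to openshot::stringToJson(), whose implementation is not part of this excerpt. A sketch of the Json::CharReaderBuilder pattern it presumably wraps, which is the same pattern the updated Clip_Tests.cpp uses directly in place of the deprecated Json::Reader (header path is an assumption and varies between the bundled and system jsoncpp):

    #include <json/json.h>   // jsoncpp
    #include <memory>
    #include <stdexcept>
    #include <string>

    // Parse a JSON string, throwing on failure (mirrors the error handling in this patch).
    Json::Value parseJson(const std::string& text) {
        Json::CharReaderBuilder builder;
        std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
        Json::Value root;
        std::string errors;
        if (!reader->parse(text.c_str(), text.c_str() + text.size(), &root, &errors))
            throw std::runtime_error("JSON could not be parsed (or is invalid): " + errors);
        return root;
    }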
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -27,6 +30,10 @@ #include "../include/ZmqLogger.h" +#if USE_RESVG == 1 + #include "ResvgQt.h" +#endif + using namespace std; using namespace openshot; @@ -51,6 +58,13 @@ // Init enabled to False (force user to call Enable()) m_pInstance->enabled = false; + + #if USE_RESVG == 1 + // Init resvg logging (if needed) + // This can only happen 1 time or it will crash + ResvgRenderer::initLog(); + #endif + } return m_pInstance; @@ -146,6 +160,9 @@ void ZmqLogger::Close() { + // Disable logger as it no longer needed + enabled = false; + // Close file (if already open) if (log_file.is_open()) log_file.close(); @@ -159,12 +176,13 @@ } // Append debug information -void ZmqLogger::AppendDebugMethod(string method_name, string arg1_name, float arg1_value, - string arg2_name, float arg2_value, - string arg3_name, float arg3_value, - string arg4_name, float arg4_value, - string arg5_name, float arg5_value, - string arg6_name, float arg6_value) +void ZmqLogger::AppendDebugMethod(string method_name, + string arg1_name, float arg1_value, + string arg2_name, float arg2_value, + string arg3_name, float arg3_value, + string arg4_name, float arg4_value, + string arg5_name, float arg5_value, + string arg6_name, float arg6_value) { if (!enabled) // Don't do anything @@ -203,4 +221,4 @@ // Send message through ZMQ Log(message.str()); } -} \ No newline at end of file +} diff -Nru libopenshot-0.2.2+dfsg1/tests/Cache_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Cache_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/Cache_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/Cache_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::Cache * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
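Note on the ZmqLogger hunks above: Close() now also clears the logger's enabled flag, and call sites throughout this patch drop the unused '"", -1' filler pairs. A sketch of the trimmed call style, assuming the remaining name/value parameters are defaulted in ZmqLogger.h (that header is not part of this excerpt):

    #include "OpenShot.h"   // path as used by the bundled tests

    // Log only the pairs that are actually available; the rest default.
    void log_example(int64_t requested_frame) {
        openshot::ZmqLogger::Instance()->AppendDebugMethod(
            "Example::GetFrame", "requested_frame", requested_frame);
    }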
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,10 +29,11 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" #include "../include/Json.h" -using namespace std; using namespace openshot; TEST(Cache_Default_Constructor) @@ -391,31 +395,31 @@ // Add some frames (out of order) std::shared_ptr f3(new Frame(3, 1280, 720, "Blue", 500, 2)); c.Add(f3); - CHECK_EQUAL(1, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("1", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f1(new Frame(1, 1280, 720, "Blue", 500, 2)); c.Add(f1); - CHECK_EQUAL(2, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("2", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f2(new Frame(2, 1280, 720, "Blue", 500, 2)); c.Add(f2); - CHECK_EQUAL(1, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("3", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f5(new Frame(5, 1280, 720, "Blue", 500, 2)); c.Add(f5); - CHECK_EQUAL(2, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("4", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f4(new Frame(4, 1280, 720, "Blue", 500, 2)); c.Add(f4); - CHECK_EQUAL(1, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("5", c.JsonValue()["version"].asString()); // Delete cache directory @@ -431,31 +435,31 @@ // Add some frames (out of order) std::shared_ptr f3(new Frame(3, 1280, 720, "Blue", 500, 2)); c.Add(f3); - CHECK_EQUAL(1, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("1", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f1(new Frame(1, 1280, 720, "Blue", 500, 2)); c.Add(f1); - CHECK_EQUAL(2, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("2", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f2(new Frame(2, 1280, 720, "Blue", 500, 2)); c.Add(f2); - CHECK_EQUAL(1, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("3", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f5(new Frame(5, 1280, 720, "Blue", 500, 2)); c.Add(f5); - CHECK_EQUAL(2, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("4", c.JsonValue()["version"].asString()); // Add some frames (out of order) std::shared_ptr f4(new Frame(4, 1280, 720, "Blue", 500, 2)); c.Add(f4); - CHECK_EQUAL(1, c.JsonValue()["ranges"].size()); + CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); CHECK_EQUAL("5", c.JsonValue()["version"].asString()); -} \ No newline at end of file +} diff -Nru libopenshot-0.2.2+dfsg1/tests/Clip_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Clip_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/Clip_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/Clip_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::Clip * @author Jonathan Thomas * 
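Note on the Cache_Tests changes above: the added (int) casts exist because CHECK_EQUAL is a template comparison, so an int literal against an unsigned size type likely triggers signed/unsigned comparison warnings; the cast keeps the checks warning-clean without changing what is tested. A minimal sketch of the pattern:

    #include "UnitTest++.h"
    #include <vector>

    TEST(SizeComparison_Sketch)
    {
        std::vector<int> v{1, 2, 3};
        // Cast the unsigned size() result so the comparison is int vs. int.
        CHECK_EQUAL(3, (int)v.size());
    }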
- * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,8 +29,9 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" -#include "../include/Tests.h" using namespace std; using namespace openshot; @@ -110,11 +114,15 @@ // Parse JSON string into JSON objects Json::Value root; - Json::Reader reader; - bool success = reader.parse( properties, root ); + Json::CharReaderBuilder rbuilder; + Json::CharReader* reader(rbuilder.newCharReader()); + string errors; + bool success = reader->parse( properties.c_str(), + properties.c_str() + properties.size(), &root, &errors ); + if (!success) // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + throw InvalidJSON("JSON could not be parsed (or is invalid)"); try { @@ -123,10 +131,10 @@ CHECK_EQUAL(true, root["alpha"]["keyframe"].asBool()); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } @@ -135,10 +143,11 @@ // Parse JSON string into JSON objects root.clear(); - success = reader.parse( properties, root ); + success = reader->parse( properties.c_str(), + properties.c_str() + properties.size(), &root, &errors ); if (!success) // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + throw InvalidJSON("JSON could not be parsed (or is invalid)"); try { @@ -147,10 +156,10 @@ CHECK_EQUAL(false, root["alpha"]["keyframe"].asBool()); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } @@ -159,10 +168,11 @@ // Parse JSON string into JSON objects root.clear(); - success = reader.parse( properties, root ); + success = reader->parse( properties.c_str(), + properties.c_str() + properties.size(), &root, &errors ); if (!success) // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + throw InvalidJSON("JSON could not be parsed (or is invalid)"); try { @@ -170,10 +180,10 @@ CHECK_EQUAL(false, root["alpha"]["keyframe"].asBool()); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } @@ -182,10 +192,11 @@ // Parse JSON string into JSON objects root.clear(); - success = reader.parse( properties, root ); + success = reader->parse( properties.c_str(), + properties.c_str() + properties.size(), &root, &errors ); if (!success) // Raise exception - throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); + throw InvalidJSON("JSON could not be parsed (or is invalid)"); try { @@ -194,12 +205,15 @@ CHECK_EQUAL(true, root["alpha"]["keyframe"].asBool()); } - catch (exception e) + catch (const std::exception& e) { // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing 
keys or invalid data types)", ""); + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } + + // Free up the reader we allocated + delete reader; } TEST(Clip_Effects) @@ -227,7 +241,7 @@ CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); // Check the # of Effects - CHECK_EQUAL(1, c10.Effects().size()); + CHECK_EQUAL(1, (int)c10.Effects().size()); // Add a 2nd negate effect @@ -248,5 +262,5 @@ CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); // Check the # of Effects - CHECK_EQUAL(2, c10.Effects().size()); + CHECK_EQUAL(2, (int)c10.Effects().size()); } diff -Nru libopenshot-0.2.2+dfsg1/tests/CMakeLists.txt libopenshot-0.2.5+dfsg1/tests/CMakeLists.txt --- libopenshot-0.2.2+dfsg1/tests/CMakeLists.txt 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/CMakeLists.txt 2020-03-03 08:00:06.000000000 +0000 @@ -4,10 +4,10 @@ # # @section LICENSE # -# Copyright (c) 2008-2014 OpenShot Studios, LLC +# Copyright (c) 2008-2019 OpenShot Studios, LLC # . This file is part of -# OpenShot Library (libopenshot), an open-source project dedicated to -# delivering high quality video editing and animation solutions to the +# OpenShot Library (libopenshot), an open-source project dedicated to +# delivering high quality video editing and animation solutions to the # world. For more information visit . # # OpenShot Library (libopenshot) is free software: you can redistribute it @@ -24,167 +24,129 @@ # along with OpenShot Library. If not, see . ################################################################################ -SET(TEST_MEDIA_PATH "${openshot_SOURCE_DIR}/src/examples/") +# Test media path, used by unit tests for input data +file(TO_NATIVE_PATH "${PROJECT_SOURCE_DIR}/src/examples/" TEST_MEDIA_PATH) +add_definitions( -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" ) ################ WINDOWS ################## # Set some compiler options for Windows # required for libopenshot-audio headers -IF (WIN32) - STRING(REPLACE "/" "\\\\" TEST_MEDIA_PATH TEST_MEDIA_PATH) +if(WIN32) add_definitions( -DIGNORE_JUCE_HYPOT=1 ) - SET(CMAKE_CXX_FLAGS " ${CMAKE_CXX_FLAGS} -include cmath") -ENDIF(WIN32) - -add_definitions( -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" ) + set(CMAKE_CXX_FLAGS " ${CMAKE_CXX_FLAGS} -include cmath") +endif() ################### UNITTEST++ ##################### # Find UnitTest++ libraries (used for unit testing) -FIND_PACKAGE(UnitTest++ REQUIRED) +find_package(UnitTest++) + +if (NOT UnitTest++_FOUND) + set(TESTS_ENABLED OFF PARENT_SCOPE) + return() +endif() # Include UnitTest++ headers (needed for compile) -include_directories(${UNITTEST++_INCLUDE_DIR}) +include_directories(${UnitTest++_INCLUDE_DIRS}) + +set_package_properties(UnitTest++ PROPERTIES + TYPE RECOMMENDED + PURPOSE "Unit testing framework") ################ IMAGE MAGICK ################## # Set the Quantum Depth that ImageMagick was built with (default to 16 bits) -IF (MAGICKCORE_QUANTUM_DEPTH) +if(MAGICKCORE_QUANTUM_DEPTH) add_definitions( -DMAGICKCORE_QUANTUM_DEPTH=${MAGICKCORE_QUANTUM_DEPTH} ) -ELSE (MAGICKCORE_QUANTUM_DEPTH) +else() add_definitions( -DMAGICKCORE_QUANTUM_DEPTH=16 ) -ENDIF (MAGICKCORE_QUANTUM_DEPTH) -IF (MAGICKCORE_HDRI_ENABLE) +endif() + +if(MAGICKCORE_HDRI_ENABLE) add_definitions( -DMAGICKCORE_HDRI_ENABLE=${MAGICKCORE_HDRI_ENABLE} ) -ELSE (MAGICKCORE_HDRI_ENABLE) +else() add_definitions( -DMAGICKCORE_HDRI_ENABLE=0 ) -ENDIF (MAGICKCORE_HDRI_ENABLE) -IF (OPENSHOT_IMAGEMAGICK_COMPATIBILITY) +endif() + +if(OPENSHOT_IMAGEMAGICK_COMPATIBILITY) add_definitions( 
-DOPENSHOT_IMAGEMAGICK_COMPATIBILITY=${OPENSHOT_IMAGEMAGICK_COMPATIBILITY} ) -ELSE (OPENSHOT_IMAGEMAGICK_COMPATIBILITY) +else() add_definitions( -DOPENSHOT_IMAGEMAGICK_COMPATIBILITY=0 ) -ENDIF (OPENSHOT_IMAGEMAGICK_COMPATIBILITY) +endif() # Find the ImageMagick++ library -FIND_PACKAGE(ImageMagick COMPONENTS Magick++ MagickWand MagickCore) -IF (ImageMagick_FOUND) +find_package(ImageMagick COMPONENTS Magick++ MagickWand MagickCore) +if(ImageMagick_FOUND) # Include ImageMagick++ headers (needed for compile) include_directories(${ImageMagick_INCLUDE_DIRS}) # define a global var (used in the C++) add_definitions( -DUSE_IMAGEMAGICK=1 ) - SET(CMAKE_SWIG_FLAGS "-DUSE_IMAGEMAGICK=1") - -ENDIF (ImageMagick_FOUND) - -################### FFMPEG ##################### -# Find FFmpeg libraries (used for video encoding / decoding) -FIND_PACKAGE(FFmpeg REQUIRED) - -# Include FFmpeg headers (needed for compile) -include_directories(${FFMPEG_INCLUDE_DIR}) + set(CMAKE_SWIG_FLAGS "-DUSE_IMAGEMAGICK=1") +endif() ################# LIBOPENSHOT-AUDIO ################### # Find JUCE-based openshot Audio libraries -FIND_PACKAGE(OpenShotAudio REQUIRED) +find_package(OpenShotAudio 0.2.0 REQUIRED) # Include Juce headers (needed for compile) include_directories(${LIBOPENSHOT_AUDIO_INCLUDE_DIRS}) -################# QT5 ################### -# Find QT5 libraries -find_package(Qt5Widgets REQUIRED) -find_package(Qt5Core REQUIRED) -find_package(Qt5Gui REQUIRED) -find_package(Qt5Multimedia REQUIRED) -find_package(Qt5MultimediaWidgets REQUIRED) - -# Include Qt headers (needed for compile) -include_directories(${Qt5Widgets_INCLUDE_DIRS}) -include_directories(${Qt5Core_INCLUDE_DIRS}) -include_directories(${Qt5Gui_INCLUDE_DIRS}) -include_directories(${Qt5Multimedia_INCLUDE_DIRS}) -include_directories(${Qt5MultimediaWidgets_INCLUDE_DIRS}) - -add_definitions(${Qt5Widgets_DEFINITIONS}) -add_definitions(${Qt5Core_DEFINITIONS}) -add_definitions(${Qt5Gui_DEFINITIONS}) -add_definitions(${Qt5Multimedia_DEFINITIONS}) -add_definitions(${Qt5MultimediaWidgets_DEFINITIONS}) - -SET(QT_LIBRARIES ${Qt5Widgets_LIBRARIES} - ${Qt5Core_LIBRARIES} - ${Qt5Gui_LIBRARIES} - ${Qt5Multimedia_LIBRARIES} - ${Qt5MultimediaWidgets_LIBRARIES}) - -# Set compiler flags for Qt -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Widgets_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Core_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Gui_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Multimedia_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5MultimediaWidgets_EXECUTABLE_COMPILE_FLAGS} ") -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -ggdb ") - -# Manually moc Qt files -qt5_wrap_cpp(MOC_FILES ${QT_HEADER_FILES}) - ################# BLACKMAGIC DECKLINK ################### -IF (ENABLE_BLACKMAGIC) +if(ENABLE_BLACKMAGIC) # Find BlackMagic DeckLinkAPI libraries - FIND_PACKAGE(BlackMagic) - - IF (BLACKMAGIC_FOUND) + find_package(BlackMagic) + + if(BLACKMAGIC_FOUND) # Include Blackmagic headers (needed for compile) include_directories(${BLACKMAGIC_INCLUDE_DIR}) - ENDIF (BLACKMAGIC_FOUND) -ENDIF (ENABLE_BLACKMAGIC) + endif() +endif() + -################### OPENMP ##################### -# Check for OpenMP (used for multi-core processing) -FIND_PACKAGE(OpenMP) - -if (OPENMP_FOUND) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS} ") -endif(OPENMP_FOUND) - -################### ZEROMQ ##################### -# Find ZeroMQ library (used for socket communication & 
logging) -FIND_PACKAGE(ZMQ REQUIRED) - -# Include FFmpeg headers (needed for compile) -include_directories(${ZMQ_INCLUDE_DIRS}) - -################### JSONCPP ##################### -# Include jsoncpp headers (needed for JSON parsing) -include_directories("../thirdparty/jsoncpp/include") - -IF (NOT DISABLE_TESTS) - ############### SET TEST SOURCE FILES ################# - SET ( OPENSHOT_TEST_FILES tests.cpp - Cache_Tests.cpp - Clip_Tests.cpp - Color_Tests.cpp - Coordinate_Tests.cpp - ReaderBase_Tests.cpp - ImageWriter_Tests.cpp - FFmpegReader_Tests.cpp - FFmpegWriter_Tests.cpp - Fraction_Tests.cpp - FrameMapper_Tests.cpp - KeyFrame_Tests.cpp - Point_Tests.cpp - Timeline_Tests.cpp ) - - ################ TESTER EXECUTABLE ################# - # Create unit test executable (openshot-test) - add_executable(openshot-test - tests.cpp - ${OPENSHOT_TEST_FILES} ) - - # Link libraries to the new executable - target_link_libraries(openshot-test openshot ${UNITTEST++_LIBRARY}) - - - #################### MAKE TEST ###################### - # Hook up the 'make test' target to the 'openshot-test' executable - ADD_CUSTOM_TARGET(test ${CMAKE_CURRENT_BINARY_DIR}/openshot-test) -ENDIF (NOT DISABLE_TESTS) +############### SET TEST SOURCE FILES ################# +set(OPENSHOT_TEST_FILES + Cache_Tests.cpp + Clip_Tests.cpp + Color_Tests.cpp + Coordinate_Tests.cpp + ReaderBase_Tests.cpp + ImageWriter_Tests.cpp + FFmpegReader_Tests.cpp + FFmpegWriter_Tests.cpp + Fraction_Tests.cpp + Frame_Tests.cpp + FrameMapper_Tests.cpp + KeyFrame_Tests.cpp + Point_Tests.cpp + Settings_Tests.cpp + Timeline_Tests.cpp ) + +################ TESTER EXECUTABLE ################# +# Create unit test executable (openshot-test) +message (STATUS "Tests enabled, test executable will be built as tests/openshot-test") +add_executable(openshot-test + tests.cpp + ${OPENSHOT_TEST_FILES} ) + +# Link libraries to the new executable +target_link_libraries(openshot-test openshot ${UnitTest++_LIBRARIES}) + +##### RUNNING TESTS (make os_test / make test) ##### +# Hook up the 'make os_test' target to the 'openshot-test' executable +add_custom_target(os_test COMMAND openshot-test) + +# Also hook up 'make test', if possible +# This requires CMake 3.11+, where the CMP0037 policy +# configured to 'NEW' mode will not reserve target names +# unless the corresponding feature is actually used +if (POLICY CMP0037) + cmake_policy(SET CMP0037 NEW) +endif() +if (CMAKE_VERSION VERSION_GREATER 3.11) + message(STATUS "Cmake 3.11+ detected, enabling 'test' target") + add_custom_target(test COMMAND openshot-test) + set(TEST_TARGET_NAME "test") +else() + set(TEST_TARGET_NAME "os_test") +endif() +add_feature_info("Testrunner" ENABLE_TESTS "Run unit tests with 'make ${TEST_TARGET_NAME}'") diff -Nru libopenshot-0.2.2+dfsg1/tests/Color_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Color_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/Color_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/Color_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::Color * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,6 +29,8 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" using namespace std; @@ -74,7 +79,7 @@ c.blue.AddPoint(100, 255); CHECK_EQUAL("#000000", c.GetColorHex(1)); - CHECK_EQUAL("#7f7f7f", c.GetColorHex(50)); + CHECK_EQUAL("#7d7d7d", c.GetColorHex(50)); CHECK_EQUAL("#ffffff", c.GetColorHex(100)); } @@ -88,7 +93,7 @@ c.blue.AddPoint(100, 255); CHECK_EQUAL("#4586db", c.GetColorHex(1)); - CHECK_EQUAL("#a2c2ed", c.GetColorHex(50)); + CHECK_EQUAL("#a0c1ed", c.GetColorHex(50)); CHECK_EQUAL("#ffffff", c.GetColorHex(100)); } @@ -113,7 +118,7 @@ c.blue.AddPoint(100, 255); CHECK_EQUAL("#4586db", c.GetColorHex(1)); - CHECK_EQUAL("#a2c2ed", c.GetColorHex(50)); + CHECK_EQUAL("#a0c1ed", c.GetColorHex(50)); CHECK_EQUAL("#ffffff", c.GetColorHex(100)); // Color with alpha diff -Nru libopenshot-0.2.2+dfsg1/tests/Coordinate_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Coordinate_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/Coordinate_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/Coordinate_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::Coordinate * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,6 +29,8 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" using namespace std; diff -Nru libopenshot-0.2.2+dfsg1/tests/FFmpegReader_Tests.cpp libopenshot-0.2.5+dfsg1/tests/FFmpegReader_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/FFmpegReader_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/FFmpegReader_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::FFmpegReader * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,8 +29,9 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" -#include "../include/Tests.h" using namespace std; using namespace openshot; @@ -95,10 +99,14 @@ int pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties on scanline 10, pixel 112 - CHECK_EQUAL(21, (int)pixels[pixel_index]); - CHECK_EQUAL(191, (int)pixels[pixel_index + 1]); - CHECK_EQUAL(0, (int)pixels[pixel_index + 2]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); + CHECK_CLOSE(21, (int)pixels[pixel_index], 5); + CHECK_CLOSE(191, (int)pixels[pixel_index + 1], 5); + CHECK_CLOSE(0, (int)pixels[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); + + // Check pixel function + CHECK_EQUAL(true, f->CheckPixel(10, 112, 21, 191, 0, 255, 5)); + CHECK_EQUAL(false, f->CheckPixel(10, 112, 0, 0, 0, 0, 5)); // Get frame 1 f = r.GetFrame(2); @@ -108,10 +116,14 @@ pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties on scanline 10, pixel 112 - CHECK_EQUAL(0, (int)pixels[pixel_index]); - CHECK_EQUAL(96, (int)pixels[pixel_index + 1]); - CHECK_EQUAL(188, (int)pixels[pixel_index + 2]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); + CHECK_CLOSE(0, (int)pixels[pixel_index], 5); + CHECK_CLOSE(96, (int)pixels[pixel_index + 1], 5); + CHECK_CLOSE(188, (int)pixels[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); + + // Check pixel function + CHECK_EQUAL(true, f->CheckPixel(10, 112, 0, 96, 188, 255, 5)); + CHECK_EQUAL(false, f->CheckPixel(10, 112, 0, 0, 0, 0, 5)); // Close reader r.Close(); @@ -209,4 +221,3 @@ // Close reader r.Close(); } - diff -Nru libopenshot-0.2.2+dfsg1/tests/FFmpegWriter_Tests.cpp libopenshot-0.2.5+dfsg1/tests/FFmpegWriter_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/FFmpegWriter_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/FFmpegWriter_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::FFmpegWriter * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
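Note on the FFmpegReader_Tests changes above: exact per-pixel equality is relaxed to a +/-5 tolerance (decoded values can drift slightly between FFmpeg builds), either via CHECK_CLOSE or the new Frame::CheckPixel helper. A sketch mirroring that usage; Frame::GetPixels(row) is assumed here as used by the surrounding reader tests, its call site not being visible in this excerpt:

    #include "UnitTest++.h"
    // Prevent name clashes with juce::UnitTest
    #define DONT_SET_USING_JUCE_NAMESPACE 1
    #include "../include/OpenShot.h"
    #include <memory>
    #include <sstream>

    TEST(FuzzyPixelCheck_Sketch)
    {
        std::stringstream path;
        path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
        openshot::FFmpegReader r(path.str());
        r.Open();

        std::shared_ptr<openshot::Frame> f = r.GetFrame(1);
        const unsigned char* pixels = f->GetPixels(10);            // scanline 10
        CHECK_CLOSE(21, (int)pixels[112 * 4], 5);                  // red channel of pixel 112, +/-5
        CHECK_EQUAL(true, f->CheckPixel(10, 112, 21, 191, 0, 255, 5));

        r.Close();
    }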
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,13 +29,15 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" -#include "../include/Tests.h" using namespace std; using namespace openshot; -TEST(FFmpegWriter_Test_Webm) +SUITE(FFMpegWriter) { +TEST(Webm) { // Reader stringstream path; @@ -73,8 +78,51 @@ int pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties on scanline 10, pixel 112 - CHECK_EQUAL(23, (int)pixels[pixel_index]); - CHECK_EQUAL(23, (int)pixels[pixel_index + 1]); - CHECK_EQUAL(23, (int)pixels[pixel_index + 2]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); + CHECK_CLOSE(23, (int)pixels[pixel_index], 5); + CHECK_CLOSE(23, (int)pixels[pixel_index + 1], 5); + CHECK_CLOSE(23, (int)pixels[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); } + +TEST(Options_Overloads) +{ + // Reader + stringstream path; + path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4"; + FFmpegReader r(path.str()); + r.Open(); + + /* WRITER ---------------- */ + FFmpegWriter w("output1.mp4"); + + // Set options + w.SetAudioOptions("aac", 48000, 192000); + w.SetVideoOptions("libx264", 1280, 720, Fraction(30,1), 5000000); + + // Open writer + w.Open(); + + // Write some frames + w.WriteFrame(&r, 24, 50); + + // Close writer & reader + w.Close(); + r.Close(); + + FFmpegReader r1("output1.mp4"); + r1.Open(); + + // Verify implied settings + CHECK_EQUAL(true, r1.info.has_audio); + CHECK_EQUAL(true, r1.info.has_video); + + CHECK_EQUAL(2, r1.GetFrame(1)->GetAudioChannelsCount()); + CHECK_EQUAL(LAYOUT_STEREO, r1.info.channel_layout); + + CHECK_EQUAL(1, r1.info.pixel_ratio.num); + CHECK_EQUAL(1, r1.info.pixel_ratio.den); + CHECK_EQUAL(false, r1.info.interlaced_frame); + CHECK_EQUAL(true, r1.info.top_field_first); +} + +} // SUITE() diff -Nru libopenshot-0.2.2+dfsg1/tests/Fraction_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Fraction_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/Fraction_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/Fraction_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::Fraction * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,6 +29,8 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" using namespace std; diff -Nru libopenshot-0.2.2+dfsg1/tests/FrameMapper_Tests.cpp libopenshot-0.2.5+dfsg1/tests/FrameMapper_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/FrameMapper_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/FrameMapper_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::FrameMapper * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
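Note on the new Options_Overloads test above: it exercises the shorter SetAudioOptions/SetVideoOptions overloads that take only the codec and core parameters, leaving the remaining settings to defaults. A sketch of the same writer flow outside the test harness; the input path is a placeholder (the tests build it from TEST_MEDIA_PATH):

    #include "OpenShot.h"   // path as used by the bundled tests

    int main() {
        openshot::FFmpegReader r("sintel_trailer-720p.mp4");   // placeholder: any readable clip
        r.Open();

        openshot::FFmpegWriter w("output1.mp4");
        w.SetAudioOptions("aac", 48000, 192000);               // codec, sample rate, bit rate
        w.SetVideoOptions("libx264", 1280, 720,
                          openshot::Fraction(30, 1), 5000000); // codec, size, fps, bit rate
        w.Open();
        w.WriteFrame(&r, 24, 50);   // write a range of frames from the reader, as in the test
        w.Close();
        r.Close();
        return 0;
    }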
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,8 +29,9 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" -#include "../include/Tests.h" using namespace std; using namespace openshot; diff -Nru libopenshot-0.2.2+dfsg1/tests/Frame_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Frame_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/Frame_Tests.cpp 1970-01-01 00:00:00.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/Frame_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -0,0 +1,150 @@ +/** + * @file + * @brief Unit tests for openshot::Frame + * @author Jonathan Thomas + * @author FeRD (Frank Dana) + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */
+
+#include "UnitTest++.h"
+// Prevent name clashes with juce::UnitTest
+#define DONT_SET_USING_JUCE_NAMESPACE 1
+#include "../include/OpenShot.h"
+
+#include
+
+using namespace openshot;
+
+SUITE(Frame_Tests)
+{
+
+TEST(Default_Constructor)
+{
+	// Create a "blank" default Frame
+	std::shared_ptr<Frame> f1(new Frame());
+
+	CHECK(f1 != nullptr);  // Test aborts here if we didn't get a Frame
+
+	// Check basic default parameters
+	CHECK_EQUAL(1, f1->GetHeight());
+	CHECK_EQUAL(1, f1->GetWidth());
+	CHECK_EQUAL(44100, f1->SampleRate());
+	CHECK_EQUAL(2, f1->GetAudioChannelsCount());
+
+	// Should be false until we load or create contents
+	CHECK_EQUAL(false, f1->has_image_data);
+	CHECK_EQUAL(false, f1->has_audio_data);
+
+	// Calling GetImage() paints a blank frame, by default
+	std::shared_ptr<QImage> i1 = f1->GetImage();
+
+	CHECK(i1 != nullptr);
+
+	CHECK_EQUAL(true, f1->has_image_data);
+	CHECK_EQUAL(false, f1->has_audio_data);
+}
+
+
+TEST(Data_Access)
+{
+	// Create a video clip
+	std::stringstream path;
+	path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
+	Clip c1(path.str());
+	c1.Open();
+
+	// Get first frame
+	std::shared_ptr<Frame> f1 = c1.GetFrame(1);
+
+	CHECK(f1 != nullptr);
+
+	CHECK_EQUAL(1, f1->number);
+	CHECK_EQUAL(1280, f1->GetWidth());
+	CHECK_EQUAL(720, f1->GetHeight());
+}
+
+
+TEST(AddImage_QImage)
+{
+	// Create a "blank" default Frame
+	std::shared_ptr<Frame> f1(new Frame());
+
+	// Load an image
+	std::stringstream path;
+	path << TEST_MEDIA_PATH << "front.png";
+	std::shared_ptr<QImage> i1(new QImage(QString::fromStdString(path.str())));
+
+	CHECK(f1 != nullptr);  // Test aborts here if we didn't get a Frame
+	CHECK_EQUAL(false, i1->isNull());
+
+	f1->AddImage(i1);
+
+	// Check loaded image parameters
+	CHECK_EQUAL(i1->height(), f1->GetHeight());
+	CHECK_EQUAL(i1->width(), f1->GetWidth());
+	CHECK_EQUAL(true, f1->has_image_data);
+}
+
+
+TEST(Copy_Constructor)
+{
+	// Create a dummy Frame
+	openshot::Frame f1(1, 800, 600, "#000000");
+
+	// Load an image
+	std::stringstream path;
+	path << TEST_MEDIA_PATH << "front.png";
+	std::shared_ptr<QImage> i1( new QImage(QString::fromStdString(path.str())) );
+
+	CHECK_EQUAL(false, i1->isNull());
+
+	// Add image to f1, then copy f1 to f2
+	f1.AddImage(i1);
+
+	Frame f2 = f1;
+
+	CHECK_EQUAL(f1.GetHeight(), f2.GetHeight());
+	CHECK_EQUAL(f1.GetWidth(), f2.GetWidth());
+
+	CHECK_EQUAL(f1.has_image_data, f2.has_image_data);
+	CHECK_EQUAL(f1.has_audio_data, f2.has_audio_data);
+
+	Fraction par1 = f1.GetPixelRatio();
+	Fraction par2 = f2.GetPixelRatio();
+
+	CHECK_EQUAL(par1.num, par2.num);
+	CHECK_EQUAL(par1.den, par2.den);
+
+	CHECK_EQUAL(f1.SampleRate(), f2.SampleRate());
+	CHECK_EQUAL(f1.GetAudioChannelsCount(), f2.GetAudioChannelsCount());
+	CHECK_EQUAL(f1.ChannelsLayout(), f2.ChannelsLayout());
+
+	CHECK_EQUAL(f1.GetBytes(), f2.GetBytes());
+	CHECK_EQUAL(f1.GetAudioSamplesCount(), f2.GetAudioSamplesCount());
+}
+
+} // SUITE(Frame_Tests)
diff -Nru libopenshot-0.2.2+dfsg1/tests/ImageWriter_Tests.cpp libopenshot-0.2.5+dfsg1/tests/ImageWriter_Tests.cpp
--- libopenshot-0.2.2+dfsg1/tests/ImageWriter_Tests.cpp	2018-09-22 19:47:46.000000000 +0000
+++ libopenshot-0.2.5+dfsg1/tests/ImageWriter_Tests.cpp	2020-03-03 08:00:06.000000000 +0000
@@ -3,9 +3,12 @@
  * @brief Unit tests for openshot::ImageWriter
  * @author Jonathan Thomas
  *
- * @section LICENSE
+ * @ref License
+ */
+
+/* LICENSE
+ *
- * Copyright (c) 2008-2014 OpenShot Studios, LLC
+ * Copyright (c) 2008-2019 OpenShot Studios, LLC
 * .
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,8 +29,9 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" -#include "../include/Tests.h" using namespace std; using namespace openshot; @@ -73,9 +77,9 @@ int pixel_index = 230 * 4; // pixel 230 (4 bytes per pixel) // Check image properties - CHECK_EQUAL(20, (int)pixels[pixel_index]); - CHECK_EQUAL(18, (int)pixels[pixel_index + 1]); - CHECK_EQUAL(11, (int)pixels[pixel_index + 2]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); + CHECK_CLOSE(20, (int)pixels[pixel_index], 5); + CHECK_CLOSE(18, (int)pixels[pixel_index + 1], 5); + CHECK_CLOSE(11, (int)pixels[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); } -#endif \ No newline at end of file +#endif diff -Nru libopenshot-0.2.2+dfsg1/tests/KeyFrame_Tests.cpp libopenshot-0.2.5+dfsg1/tests/KeyFrame_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/KeyFrame_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/KeyFrame_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::Keyframe * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,6 +29,8 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" using namespace std; @@ -46,7 +51,7 @@ k1.AddPoint(openshot::Point(2,3)); CHECK_THROW(k1.GetPoint(-1), OutOfBoundsPoint); - CHECK_EQUAL(1, k1.Points.size()); + CHECK_EQUAL(1, k1.GetCount()); CHECK_CLOSE(2.0f, k1.GetPoint(0).co.X, 0.00001); CHECK_CLOSE(3.0f, k1.GetPoint(0).co.Y, 0.00001); CHECK_THROW(k1.GetPoint(1), OutOfBoundsPoint); @@ -87,13 +92,13 @@ // Spot check values from the curve CHECK_CLOSE(1.0f, kf.GetValue(-1), 0.0001); CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); - CHECK_CLOSE(1.00023f, kf.GetValue(1), 0.0001); - CHECK_CLOSE(1.14025f, kf.GetValue(9), 0.0001); - CHECK_CLOSE(1.91492f, kf.GetValue(20), 0.0001); - CHECK_CLOSE(3.81602f, kf.GetValue(40), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + CHECK_CLOSE(1.12414f, kf.GetValue(9), 0.0001); + CHECK_CLOSE(1.86370f, kf.GetValue(20), 0.0001); + CHECK_CLOSE(3.79733f, kf.GetValue(40), 0.0001); CHECK_CLOSE(4.0f, kf.GetValue(50), 0.0001); // Check the expected number of values - CHECK_EQUAL(kf.Values.size(), 51); + CHECK_EQUAL(51, kf.GetLength()); } TEST(Keyframe_GetValue_For_Bezier_Curve_5_Points_40_Percent_Handle) @@ -109,14 +114,14 @@ // Spot check values from the curve CHECK_CLOSE(kf.GetValue(-1), 1.0f, 0.0001); CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); - CHECK_CLOSE(1.00023f, kf.GetValue(1), 0.0001); - CHECK_CLOSE(2.73656f, kf.GetValue(27), 0.0001); - CHECK_CLOSE(7.55139f, kf.GetValue(77), 0.0001); - CHECK_CLOSE(4.08102f, kf.GetValue(127), 0.0001); - CHECK_CLOSE(1.77569f, kf.GetValue(177), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + CHECK_CLOSE(2.68197f, kf.GetValue(27), 0.0001); + CHECK_CLOSE(7.47719f, kf.GetValue(77), 0.0001); + CHECK_CLOSE(4.20468f, kf.GetValue(127), 0.0001); + CHECK_CLOSE(1.73860f, kf.GetValue(177), 0.0001); 
CHECK_CLOSE(3.0f, kf.GetValue(200), 0.0001); // Check the expected number of values - CHECK_EQUAL(kf.Values.size(), 201); + CHECK_EQUAL(201, kf.GetLength()); } TEST(Keyframe_GetValue_For_Bezier_Curve_5_Points_25_Percent_Handle) @@ -132,14 +137,14 @@ // Spot check values from the curve CHECK_CLOSE(1.0f, kf.GetValue(-1), 0.0001); CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); - CHECK_CLOSE(1.00023f, kf.GetValue(1), 0.0001); - CHECK_CLOSE(2.73656f, kf.GetValue(27), 0.0001); - CHECK_CLOSE(7.55139f, kf.GetValue(77), 0.0001); - CHECK_CLOSE(4.08102f, kf.GetValue(127), 0.0001); - CHECK_CLOSE(1.77569f, kf.GetValue(177), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + CHECK_CLOSE(2.68197f, kf.GetValue(27), 0.0001); + CHECK_CLOSE(7.47719f, kf.GetValue(77), 0.0001); + CHECK_CLOSE(4.20468f, kf.GetValue(127), 0.0001); + CHECK_CLOSE(1.73860f, kf.GetValue(177), 0.0001); CHECK_CLOSE(3.0f, kf.GetValue(200), 0.0001); // Check the expected number of values - CHECK_EQUAL(kf.Values.size(), 201); + CHECK_EQUAL(201, kf.GetLength()); } TEST(Keyframe_GetValue_For_Linear_Curve_3_Points) @@ -159,7 +164,7 @@ CHECK_CLOSE(4.4f, kf.GetValue(40), 0.0001); CHECK_CLOSE(2.0f, kf.GetValue(50), 0.0001); // Check the expected number of values - CHECK_EQUAL(kf.Values.size(), 51); + CHECK_EQUAL(51, kf.GetLength()); } TEST(Keyframe_GetValue_For_Constant_Curve_3_Points) @@ -180,7 +185,7 @@ CHECK_CLOSE(8.0f, kf.GetValue(49), 0.0001); CHECK_CLOSE(2.0f, kf.GetValue(50), 0.0001); // Check the expected number of values - CHECK_EQUAL(kf.Values.size(), 51); + CHECK_EQUAL(51, kf.GetLength()); } TEST(Keyframe_Check_Direction_and_Repeat_Fractions) @@ -192,29 +197,29 @@ kf.AddPoint(500, 500); // Spot check values from the curve - CHECK_EQUAL(kf.GetInt(1), 500); - CHECK_EQUAL(kf.IsIncreasing(1), false); - CHECK_EQUAL(kf.GetRepeatFraction(1).num, 1); - CHECK_EQUAL(kf.GetRepeatFraction(1).den, 12); - CHECK_EQUAL(kf.GetDelta(1), 500); - - CHECK_EQUAL(kf.GetInt(24), 498); - CHECK_EQUAL(kf.IsIncreasing(24), false); - CHECK_EQUAL(kf.GetRepeatFraction(24).num, 3); - CHECK_EQUAL(kf.GetRepeatFraction(24).den, 6); - CHECK_EQUAL(kf.GetDelta(24), 0); - - CHECK_EQUAL(kf.GetLong(390), 100); - CHECK_EQUAL(kf.IsIncreasing(390), true); - CHECK_EQUAL(kf.GetRepeatFraction(390).num, 3); - CHECK_EQUAL(kf.GetRepeatFraction(390).den, 15); - CHECK_EQUAL(kf.GetDelta(390), 0); - - CHECK_EQUAL(kf.GetLong(391), 100); - CHECK_EQUAL(kf.IsIncreasing(391), true); - CHECK_EQUAL(kf.GetRepeatFraction(391).num, 4); - CHECK_EQUAL(kf.GetRepeatFraction(391).den, 15); - CHECK_EQUAL(kf.GetDelta(388), -1); + CHECK_EQUAL(500, kf.GetInt(1)); + CHECK_EQUAL(false, kf.IsIncreasing(1)); + CHECK_EQUAL(1, kf.GetRepeatFraction(1).num); + CHECK_EQUAL(13, kf.GetRepeatFraction(1).den); + CHECK_EQUAL(500, kf.GetDelta(1)); + + CHECK_EQUAL(498, kf.GetInt(24)); + CHECK_EQUAL(false, kf.IsIncreasing(24)); + CHECK_EQUAL(3, kf.GetRepeatFraction(24).num); + CHECK_EQUAL(6, kf.GetRepeatFraction(24).den); + CHECK_EQUAL(0, kf.GetDelta(24)); + + CHECK_EQUAL(100, kf.GetLong(390)); + CHECK_EQUAL(true, kf.IsIncreasing(390)); + CHECK_EQUAL(3, kf.GetRepeatFraction(390).num); + CHECK_EQUAL(16, kf.GetRepeatFraction(390).den); + CHECK_EQUAL(0, kf.GetDelta(390)); + + CHECK_EQUAL(100, kf.GetLong(391)); + CHECK_EQUAL(true, kf.IsIncreasing(391)); + CHECK_EQUAL(4, kf.GetRepeatFraction(391).num); + CHECK_EQUAL(16, kf.GetRepeatFraction(391).den); + CHECK_EQUAL(-1, kf.GetDelta(388)); } @@ -227,22 +232,22 @@ kf.AddPoint(2500, 0.0); // Spot check values from the curve (to the right) - 
CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(900, 900)).co.X, 1000); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(1, 1)).co.X, 1); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(5, 5)).co.X, 1000); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(1000, 1000)).co.X, 1000); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(1001, 1001)).co.X, 2500); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(2500, 2500)).co.X, 2500); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(3000, 3000)).co.X, 2500); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(900, 900)).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(1, 1)).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(5, 5)).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(1000, 1000)).co.X); + CHECK_EQUAL(2500, kf.GetClosestPoint(openshot::Point(1001, 1001)).co.X); + CHECK_EQUAL(2500, kf.GetClosestPoint(openshot::Point(2500, 2500)).co.X); + CHECK_EQUAL(2500, kf.GetClosestPoint(openshot::Point(3000, 3000)).co.X); // Spot check values from the curve (to the left) - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(900, 900), true).co.X, 1); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(1, 1), true).co.X, 1); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(5, 5), true).co.X, 1); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(1000, 1000), true).co.X, 1); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(1001, 1001), true).co.X, 1000); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(2500, 2500), true).co.X, 1000); - CHECK_EQUAL(kf.GetClosestPoint(openshot::Point(3000, 3000), true).co.X, 2500); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(900, 900), true).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(1, 1), true).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(5, 5), true).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(1000, 1000), true).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(1001, 1001), true).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(2500, 2500), true).co.X); + CHECK_EQUAL(2500, kf.GetClosestPoint(openshot::Point(3000, 3000), true).co.X); } @@ -255,13 +260,13 @@ kf.AddPoint(2500, 0.0); // Spot check values from the curve - CHECK_EQUAL(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(900, 900))).co.X, 1); - CHECK_EQUAL(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1, 1))).co.X, 1); - CHECK_EQUAL(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(5, 5))).co.X, 1); - CHECK_EQUAL(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1000, 1000))).co.X, 1); - CHECK_EQUAL(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1001, 1001))).co.X, 1000); - CHECK_EQUAL(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(2500, 2500))).co.X, 1000); - CHECK_EQUAL(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(3000, 3000))).co.X, 1000); + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(900, 900))).co.X); + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1, 1))).co.X); + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(5, 5))).co.X); + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1000, 1000))).co.X); + CHECK_EQUAL(1000, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1001, 1001))).co.X); + CHECK_EQUAL(1000, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(2500, 2500))).co.X); + CHECK_EQUAL(1000, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(3000, 3000))).co.X); } @@ -272,22 +277,22 @@ 
kf.AddPoint(1, 1.0); // Spot check values from the curve - CHECK_EQUAL(kf.GetMaxPoint().co.Y, 1.0); + CHECK_EQUAL(1.0, kf.GetMaxPoint().co.Y); kf.AddPoint(2, 0.0); // Spot check values from the curve - CHECK_EQUAL(kf.GetMaxPoint().co.Y, 1.0); + CHECK_EQUAL(1.0, kf.GetMaxPoint().co.Y); kf.AddPoint(3, 2.0); // Spot check values from the curve - CHECK_EQUAL(kf.GetMaxPoint().co.Y, 2.0); + CHECK_EQUAL(2.0, kf.GetMaxPoint().co.Y); kf.AddPoint(4, 1.0); // Spot check values from the curve - CHECK_EQUAL(kf.GetMaxPoint().co.Y, 2.0); + CHECK_EQUAL(2.0, kf.GetMaxPoint().co.Y); } TEST(Keyframe_Scale_Keyframe) @@ -302,8 +307,8 @@ CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); CHECK_CLOSE(7.99f, kf.GetValue(24), 0.01); CHECK_CLOSE(8.0f, kf.GetValue(25), 0.01); - CHECK_CLOSE(3.68f, kf.GetValue(40), 0.01); - CHECK_CLOSE(2.0f, kf.GetValue(49), 0.01); + CHECK_CLOSE(3.85f, kf.GetValue(40), 0.01); + CHECK_CLOSE(2.01f, kf.GetValue(49), 0.01); CHECK_CLOSE(2.0f, kf.GetValue(50), 0.01); // Resize / Scale the keyframe @@ -311,12 +316,12 @@ // Spot check values from the curve CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); - CHECK_CLOSE(4.21f, kf.GetValue(24), 0.01); - CHECK_CLOSE(4.47f, kf.GetValue(25), 0.01); - CHECK_CLOSE(7.57f, kf.GetValue(40), 0.01); + CHECK_CLOSE(4.08f, kf.GetValue(24), 0.01); + CHECK_CLOSE(4.36f, kf.GetValue(25), 0.01); + CHECK_CLOSE(7.53f, kf.GetValue(40), 0.01); CHECK_CLOSE(7.99f, kf.GetValue(49), 0.01); CHECK_CLOSE(8.0f, kf.GetValue(50), 0.01); - CHECK_CLOSE(2.35f, kf.GetValue(90), 0.01); + CHECK_CLOSE(2.39f, kf.GetValue(90), 0.01); CHECK_CLOSE(2.0f, kf.GetValue(100), 0.01); // Resize / Scale the keyframe @@ -326,8 +331,8 @@ CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); CHECK_CLOSE(7.99f, kf.GetValue(24), 0.01); CHECK_CLOSE(8.0f, kf.GetValue(25), 0.01); - CHECK_CLOSE(3.68f, kf.GetValue(40), 0.01); - CHECK_CLOSE(2.0f, kf.GetValue(49), 0.01); + CHECK_CLOSE(3.85f, kf.GetValue(40), 0.01); + CHECK_CLOSE(2.01f, kf.GetValue(49), 0.01); CHECK_CLOSE(2.0f, kf.GetValue(50), 0.01); } @@ -375,6 +380,117 @@ kf.AddPoint(1, 2.0); // Spot check values from the curve - CHECK_EQUAL(kf.GetLength(), 1); - CHECK_CLOSE(kf.GetPoint(0).co.Y, 2.0, 0.01); -} \ No newline at end of file + CHECK_EQUAL(1, kf.GetLength()); + CHECK_CLOSE(2.0, kf.GetPoint(0).co.Y, 0.01); +} + +TEST(Keyframe_Large_Number_Values) +{ + // Large value + int64_t const large_value = 30 * 60 * 90; + + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 1.0); + kf.AddPoint(large_value, 100.0); // 90 minutes long + + // Spot check values from the curve + CHECK_EQUAL(large_value + 1, kf.GetLength()); + CHECK_CLOSE(1.0, kf.GetPoint(0).co.Y, 0.01); + CHECK_CLOSE(100.0, kf.GetPoint(1).co.Y, 0.01); +} + +TEST(Keyframe_Remove_Point) +{ + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(openshot::Point(Coordinate(3, 100), CONSTANT)); + CHECK_EQUAL(1, kf.GetInt(2)); + kf.AddPoint(openshot::Point(Coordinate(2, 50), CONSTANT)); + CHECK_EQUAL(50, kf.GetInt(2)); + kf.RemovePoint(1); // This is the index of point with X == 2 + CHECK_EQUAL(1, kf.GetInt(2)); + CHECK_THROW(kf.RemovePoint(100), OutOfBoundsPoint); +} + +TEST(Keyframe_Constant_Interpolation_First_Segment) +{ + Keyframe kf; + kf.AddPoint(Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(Point(Coordinate(2, 50), CONSTANT)); + kf.AddPoint(Point(Coordinate(3, 100), CONSTANT)); + CHECK_EQUAL(1, kf.GetInt(0)); + CHECK_EQUAL(1, kf.GetInt(1)); + CHECK_EQUAL(50, kf.GetInt(2)); + CHECK_EQUAL(100, kf.GetInt(3)); + CHECK_EQUAL(100, kf.GetInt(4)); +} + 
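+// A minimal usage sketch (illustration only, not part of this patch) of the
+// accessor-based Keyframe API exercised above -- AddPoint(), GetCount(),
+// GetLength(), GetValue() -- which replaces direct access to the old
+// Points/Values members. Only methods already used by these tests are assumed;
+// the helper name is hypothetical.
+//
+//	void keyframe_usage_sketch()
+//	{
+//		openshot::Keyframe kf;
+//		kf.AddPoint(1, 1.0);           // value 1.0 at frame 1
+//		kf.AddPoint(50, 4.0);          // value 4.0 at frame 50
+//
+//		auto points = kf.GetCount();   // 2: number of defined points
+//		auto frames = kf.GetLength();  // 51: one value per frame, per the GetLength checks above
+//		auto middle = kf.GetValue(25); // interpolated value part-way along the curve
+//	}
+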
+TEST(Keyframe_isIncreasing) +{ + // Which cases need to be tested to keep same behaviour as + // previously? + // + // - "invalid point" => true + // - point where all next values are equal => false + // - point where first non-eq next value is smaller => false + // - point where first non-eq next value is larger => true + Keyframe kf; + kf.AddPoint(1, 1, LINEAR); // testing with linear + kf.AddPoint(3, 5, BEZIER); // testing with bezier + kf.AddPoint(6, 10, CONSTANT); // first non-eq is smaller + kf.AddPoint(8, 8, CONSTANT); // first non-eq is larger + kf.AddPoint(10, 10, CONSTANT); // all next values are equal + kf.AddPoint(15, 10, CONSTANT); + + // "invalid points" + CHECK_EQUAL(true, kf.IsIncreasing(0)); + CHECK_EQUAL(true, kf.IsIncreasing(15)); + // all next equal + CHECK_EQUAL(false, kf.IsIncreasing(12)); + // first non-eq is larger + CHECK_EQUAL(true, kf.IsIncreasing(8)); + // first non-eq is smaller + CHECK_EQUAL(false, kf.IsIncreasing(6)); + // bezier and linear + CHECK_EQUAL(true, kf.IsIncreasing(4)); + CHECK_EQUAL(true, kf.IsIncreasing(2)); +} + +TEST(Keyframe_GetLength) +{ + Keyframe f; + CHECK_EQUAL(0, f.GetLength()); + f.AddPoint(1, 1); + CHECK_EQUAL(1, f.GetLength()); + f.AddPoint(2, 1); + CHECK_EQUAL(3, f.GetLength()); + f.AddPoint(200, 1); + CHECK_EQUAL(201, f.GetLength()); + + Keyframe g; + g.AddPoint(200, 1); + CHECK_EQUAL(1, g.GetLength()); + g.AddPoint(1,1); + CHECK_EQUAL(201, g.GetLength()); +} + +TEST(Keyframe_Use_Interpolation_of_Segment_End_Point) +{ + Keyframe f; + f.AddPoint(1,0, CONSTANT); + f.AddPoint(100,155, BEZIER); + CHECK_CLOSE(75.9, f.GetValue(50), 0.1); +} + +TEST(Keyframe_Handle_Large_Segment) +{ + Keyframe kf; + kf.AddPoint(1, 0, CONSTANT); + kf.AddPoint(1000000, 1, LINEAR); + UNITTEST_TIME_CONSTRAINT(10); // 10 milliseconds would still be relatively slow, but need to think about slower build machines! + CHECK_CLOSE(0.5, kf.GetValue(500000), 0.01); + CHECK_EQUAL(true, kf.IsIncreasing(10)); + Fraction fr = kf.GetRepeatFraction(250000); + CHECK_CLOSE(0.5, (double)fr.num / fr.den, 0.01); +} diff -Nru libopenshot-0.2.2+dfsg1/tests/Point_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Point_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/Point_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/Point_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::Point * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,6 +29,8 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" using namespace std; diff -Nru libopenshot-0.2.2+dfsg1/tests/ReaderBase_Tests.cpp libopenshot-0.2.5+dfsg1/tests/ReaderBase_Tests.cpp --- libopenshot-0.2.2+dfsg1/tests/ReaderBase_Tests.cpp 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/tests/ReaderBase_Tests.cpp 2020-03-03 08:00:06.000000000 +0000 @@ -3,9 +3,12 @@ * @brief Unit tests for openshot::ReaderBase * @author Jonathan Thomas * - * @section LICENSE + * @ref License + */ + +/* LICENSE * - * Copyright (c) 2008-2014 OpenShot Studios, LLC + * Copyright (c) 2008-2019 OpenShot Studios, LLC * . 
This file is part of
  * OpenShot Library (libopenshot), an open-source project dedicated to
  * delivering high quality video editing and animation solutions to the
@@ -26,6 +29,8 @@
  */
 #include "UnitTest++.h"
+// Prevent name clashes with juce::UnitTest
+#define DONT_SET_USING_JUCE_NAMESPACE 1
 #include "../include/OpenShot.h"
 using namespace std;
@@ -44,9 +49,9 @@
 	std::shared_ptr<Frame> GetFrame(int64_t number) { std::shared_ptr<Frame> f(new Frame()); return f; }
 	void Close() { };
 	void Open() { };
-	string Json() { return NULL; };
+	string Json() const { return NULL; };
 	void SetJson(string value) { };
-	Json::Value JsonValue() { return (int) NULL; };
+	Json::Value JsonValue() const { return (int) NULL; };
 	void SetJsonValue(Json::Value root) { };
 	bool IsOpen() { return true; };
 	string Name() { return "TestReader"; };
@@ -56,7 +61,6 @@
 	TestReader t1;
 	// Check some of the default values of the FileInfo struct on the base class
-	// If InitFileInfo() is not called in the derived class, these checks would fail.
 	CHECK_EQUAL(false, t1.info.has_audio);
 	CHECK_EQUAL(false, t1.info.has_audio);
 	CHECK_CLOSE(0.0f, t1.info.duration, 0.00001);
diff -Nru libopenshot-0.2.2+dfsg1/tests/Settings_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Settings_Tests.cpp
--- libopenshot-0.2.2+dfsg1/tests/Settings_Tests.cpp	1970-01-01 00:00:00.000000000 +0000
+++ libopenshot-0.2.5+dfsg1/tests/Settings_Tests.cpp	2020-03-03 08:00:06.000000000 +0000
@@ -0,0 +1,64 @@
+/**
+ * @file
+ * @brief Unit tests for openshot::Settings
+ * @author Jonathan Thomas
+ *
+ * @ref License
+ */
+
+/* LICENSE
+ *
+ * Copyright (c) 2008-2019 OpenShot Studios, LLC
+ * . This file is part of
+ * OpenShot Library (libopenshot), an open-source project dedicated to
+ * delivering high quality video editing and animation solutions to the
+ * world. For more information visit .
+ *
+ * OpenShot Library (libopenshot) is free software: you can redistribute it
+ * and/or modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * OpenShot Library (libopenshot) is distributed in the hope that it will be
+ * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with OpenShot Library. If not, see .
+ */
+
+#include "UnitTest++.h"
+// Prevent name clashes with juce::UnitTest
+#define DONT_SET_USING_JUCE_NAMESPACE 1
+#include "../include/OpenShot.h"
+
+using namespace std;
+using namespace openshot;
+
+TEST(Settings_Default_Constructor)
+{
+	// Get the global Settings singleton
+	Settings *s = Settings::Instance();
+
+	CHECK_EQUAL(0, s->HARDWARE_DECODER);
+	CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING);
+	CHECK_EQUAL(false, s->WAIT_FOR_VIDEO_PROCESSING_TASK);
+}
+
+TEST(Settings_Change_Settings)
+{
+	// Get the global Settings singleton and change some settings
+	Settings *s = Settings::Instance();
+	s->HARDWARE_DECODER = 1;
+	s->HIGH_QUALITY_SCALING = true;
+	s->WAIT_FOR_VIDEO_PROCESSING_TASK = true;
+
+	CHECK_EQUAL(1, s->HARDWARE_DECODER);
+	CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING);
+	CHECK_EQUAL(true, s->WAIT_FOR_VIDEO_PROCESSING_TASK);
+
+	CHECK_EQUAL(1, s->HARDWARE_DECODER);
+	CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING);
+	CHECK_EQUAL(true, Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK);
+}
diff -Nru libopenshot-0.2.2+dfsg1/tests/tests.cpp libopenshot-0.2.5+dfsg1/tests/tests.cpp
--- libopenshot-0.2.2+dfsg1/tests/tests.cpp	2018-09-22 19:47:46.000000000 +0000
+++ libopenshot-0.2.5+dfsg1/tests/tests.cpp	2020-03-03 08:00:06.000000000 +0000
@@ -3,9 +3,12 @@
  * @brief Source code for Unit test executable (runs all tests and reports successes and failures)
  * @author Jonathan Thomas
  *
- * @section LICENSE
+ * @ref License
+ */
+
+/* LICENSE
+ *
- * Copyright (c) 2008-2014 OpenShot Studios, LLC
+ * Copyright (c) 2008-2019 OpenShot Studios, LLC
  * . This file is part of
  * OpenShot Library (libopenshot), an open-source project dedicated to
  * delivering high quality video editing and animation solutions to the
diff -Nru libopenshot-0.2.2+dfsg1/tests/Timeline_Tests.cpp libopenshot-0.2.5+dfsg1/tests/Timeline_Tests.cpp
--- libopenshot-0.2.2+dfsg1/tests/Timeline_Tests.cpp	2018-09-22 19:47:46.000000000 +0000
+++ libopenshot-0.2.5+dfsg1/tests/Timeline_Tests.cpp	2020-03-03 08:00:06.000000000 +0000
@@ -3,9 +3,12 @@
  * @brief Unit tests for openshot::Timeline
  * @author Jonathan Thomas
  *
- * @section LICENSE
+ * @ref License
+ */
+
+/* LICENSE
+ *
- * Copyright (c) 2008-2014 OpenShot Studios, LLC
+ * Copyright (c) 2008-2019 OpenShot Studios, LLC
 * .
This file is part of * OpenShot Library (libopenshot), an open-source project dedicated to * delivering high quality video editing and animation solutions to the @@ -26,8 +29,9 @@ */ #include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "../include/OpenShot.h" -#include "../include/Tests.h" using namespace std; using namespace openshot; @@ -119,64 +123,64 @@ int pixel_index = 230 * 4; // pixel 230 (4 bytes per pixel) // Check image properties - CHECK_EQUAL(21, (int)f->GetPixels(pixel_row)[pixel_index]); - CHECK_EQUAL(191, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); + CHECK_CLOSE(21, (int)f->GetPixels(pixel_row)[pixel_index], 5); + CHECK_CLOSE(191, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); + CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); // Get frame f = t.GetFrame(2); // Check image properties - CHECK_EQUAL(176, (int)f->GetPixels(pixel_row)[pixel_index]); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); + CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 5); + CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); + CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); // Get frame f = t.GetFrame(3); // Check image properties - CHECK_EQUAL(23, (int)f->GetPixels(pixel_row)[pixel_index]); - CHECK_EQUAL(190, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); + CHECK_CLOSE(23, (int)f->GetPixels(pixel_row)[pixel_index], 5); + CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); + CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); // Get frame f = t.GetFrame(24); // Check image properties - CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index]); - CHECK_EQUAL(106, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); + CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index], 5); + CHECK_CLOSE(106, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); + CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); // Get frame f = t.GetFrame(5); // Check image properties - CHECK_EQUAL(23, (int)f->GetPixels(pixel_row)[pixel_index]); - CHECK_EQUAL(190, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); + CHECK_CLOSE(23, (int)f->GetPixels(pixel_row)[pixel_index], 5); + CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); + CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); // Get frame f = t.GetFrame(25); // Check image properties - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index]); - 
CHECK_EQUAL(94, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); + CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index], 5); + CHECK_CLOSE(94, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); + CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); // Get frame f = t.GetFrame(4); // Check image properties - CHECK_EQUAL(176, (int)f->GetPixels(pixel_row)[pixel_index]); - CHECK_EQUAL(0, (int)f->GetPixels(pixel_row)[pixel_index + 1]); - CHECK_EQUAL(186, (int)f->GetPixels(pixel_row)[pixel_index + 2]); - CHECK_EQUAL(255, (int)f->GetPixels(pixel_row)[pixel_index + 3]); + CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 5); + CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); + CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); + CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); // Close reader t.Close(); @@ -243,7 +247,6 @@ clip_middle1.Position(0.5); t.AddClip(&clip_middle1); - // Loop through clips again, and re-check order counter = 0; clips = t.Clips(); diff -Nru libopenshot-0.2.2+dfsg1/.travis.yml libopenshot-0.2.5+dfsg1/.travis.yml --- libopenshot-0.2.2+dfsg1/.travis.yml 2018-09-22 19:47:46.000000000 +0000 +++ libopenshot-0.2.5+dfsg1/.travis.yml 2020-03-03 08:00:06.000000000 +0000 @@ -1,49 +1,167 @@ -dist: trusty +language: cpp +compiler: gcc + +# This section uses a rather esoteric (and tricky!) feature of YAML, +# &aliases and *anchors, to build package lists out of sublists without +# repeating their contents. Basically, '&name' creates an alias for the +# given data, which can then be referenced using the anchor '*name'. 
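+# For example (illustration only, not part of this configuration), a short
+# base list can be anchored once and then referenced from a longer list:
+#
+#   base_packages: &base_pkgs    # '&base_pkgs' anchors this sequence
+#     - cmake
+#     - swig
+#   more_packages: &more_pkgs
+#     - *base_pkgs               # nests the anchored sequence as a single item
+#     - doxygen
+#
+# The apt addon entries below rely on exactly this pattern ('- *p_common'
+# inside the &ff_common list), with the nested package lists flattened when
+# the packages are installed.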
+addons: + apt: + packages: &p_common # Packages common to all Ubuntu builds + - cmake + - swig + - libopenshot-audio-dev + - libmagick++-dev + - libunittest++-dev + - libzmq3-dev + - qtbase5-dev + - qtmultimedia5-dev + - doxygen + - graphviz + - curl + packages: &ff_common # Common set of FFmpeg packages + - *p_common + - libfdk-aac-dev + - libavcodec-dev + - libavformat-dev + - libavdevice-dev + - libavutil-dev + - libavfilter-dev + - libswscale-dev + - libpostproc-dev + - libavresample-dev + - libswresample-dev matrix: include: - - language: cpp - name: "FFmpeg 2" - before_script: - - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y - - sudo add-apt-repository ppa:beineri/opt-qt58-trusty -y - - sudo apt-get update -qq - - sudo apt-get install gcc-4.8 cmake libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y - - sudo apt autoremove -y - script: - - mkdir -p build; cd build; - - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ - - make VERBOSE=1 - - make test - - - language: cpp - name: "FFmpeg 3" - before_script: - - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y - - sudo add-apt-repository ppa:beineri/opt-qt58-trusty -y - - sudo add-apt-repository ppa:jonathonf/ffmpeg-3 -y - - sudo apt-get update -qq - - sudo apt-get install gcc-4.8 cmake libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y - - sudo apt autoremove -y - script: - - mkdir -p build; cd build; - - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ - - make VERBOSE=1 - - make test - - - language: cpp - name: "FFmpeg 4" - before_script: - - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y - - sudo add-apt-repository ppa:beineri/opt-qt58-trusty -y - - sudo add-apt-repository ppa:jonathonf/ffmpeg -y - - sudo add-apt-repository ppa:jonathonf/ffmpeg-4 -y - - sudo add-apt-repository ppa:jonathonf/backports -y - - sudo apt-get update -qq - - sudo apt-get install gcc-4.8 cmake libavcodec58 libavformat58 libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y - - sudo apt autoremove -y - script: - - mkdir -p build; cd build; - - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ - - make VERBOSE=1 - - make test + + - name: "Coverage (Ubuntu 18.04 Bionic)" + env: + - BUILD_VERSION=coverage_ffmpeg34 + - CMAKE_EXTRA_ARGS="-DENABLE_COVERAGE=1" + - TEST_TARGET=coverage + os: linux + dist: bionic + addons: + apt: + sources: + - sourceline: 'ppa:openshot.developers/libopenshot-daily' + - sourceline: 'ppa:beineri/opt-qt-5.12.3-bionic' + packages: + - *ff_common + - qt5-default + - lcov + - binutils-common # For c++filt + + - name: "FFmpeg 4 GCC (Ubuntu 18.04 Bionic)" + env: + - BUILD_VERSION=ffmpeg4 + - CMAKE_EXTRA_ARGS="" + - TEST_TARGET=test + os: linux + dist: bionic + addons: + apt: + sources: + - sourceline: 'ppa:openshot.developers/libopenshot-daily' + - sourceline: 
'ppa:beineri/opt-qt-5.12.3-bionic' + - sourceline: 'ppa:jonathonf/ffmpeg-4' + packages: + - *ff_common + - qt5-default + - libjsoncpp-dev + - libavcodec58 + - libavformat58 + - libavdevice58 + - libavutil56 + - libavfilter7 + - libswscale5 + - libpostproc55 + - libavresample4 + - libswresample3 + + - name: "FFmpeg 3.4 GCC (Ubuntu 18.04 Bionic)" + env: + - BUILD_VERSION=ffmpeg34 + - CMAKE_EXTRA_ARGS="" + - TEST_TARGET=test + os: linux + dist: bionic + addons: + apt: + sources: + - sourceline: 'ppa:openshot.developers/libopenshot-daily' + - sourceline: 'ppa:beineri/opt-qt-5.12.3-bionic' + packages: + - *ff_common + - qt5-default + - libjsoncpp-dev + + - name: "FFmpeg 3.4 Clang (Ubuntu 18.04 Bionic)" + env: + - BUILD_VERSION=clang_ffmpeg34 + - CMAKE_EXTRA_ARGS="" + - TEST_TARGET=test + os: linux + dist: bionic + compiler: clang + addons: + apt: + sources: + - sourceline: 'ppa:openshot.developers/libopenshot-daily' + - sourceline: 'ppa:beineri/opt-qt-5.12.3-bionic' + packages: + - *ff_common + - qt5-default + - libomp-dev + + - name: "FFmpeg 3.2 GCC (Ubuntu 16.04 Xenial)" + env: + - BUILD_VERSION=ffmpeg32 + - CMAKE_EXTRA_ARGS="" + - TEST_TARGET="os_test" + os: linux + dist: xenial + addons: + apt: + sources: + - sourceline: 'ppa:openshot.developers/libopenshot-daily' + - sourceline: 'ppa:beineri/opt-qt-5.10.0-xenial' + - sourceline: 'ppa:jon-hedgerows/ffmpeg-backports' + packages: + - *ff_common + - libavcodec57 + - libavdevice57 + - libavfilter6 + - libavformat57 + - libavresample3 + - libavutil55 + - libpostproc54 + - libswresample2 + - libswscale4 + + - name: "FFmpeg 2 GCC (Ubuntu 16.04 Xenial)" + env: + - BUILD_VERSION=ffmpeg2 + - CMAKE_EXTRA_ARGS="" + - TEST_TARGET="os_test" + os: linux + dist: xenial + addons: + apt: + sources: + - sourceline: 'ppa:openshot.developers/libopenshot-daily' + - sourceline: 'ppa:beineri/opt-qt-5.10.0-xenial' + packages: + - *ff_common + +script: + - mkdir -p build; cd build; + - cmake -DCMAKE_BUILD_TYPE:STRING="Debug" ${CMAKE_EXTRA_ARGS} ../ + - make VERBOSE=1 + - make ${TEST_TARGET} + - make install DESTDIR="$BUILD_VERSION" + - cd .. + +after_success: + - if [ "x$TEST_TARGET" = "xcoverage" ]; then bash <(curl -s https://codecov.io/bash) -f build/coverage.info || echo "Codecov did not collect coverage reports"; fi
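
Taken together, the script and after_success steps above amount to roughly the following local recipe (a sketch only; the ENABLE_COVERAGE option, the coverage target, and build/coverage.info come from the build configuration this diff adds, and the upload step is optional):

    mkdir -p build && cd build
    cmake -DCMAKE_BUILD_TYPE:STRING="Debug" -DENABLE_COVERAGE=1 ../
    make VERBOSE=1
    make coverage        # the TEST_TARGET used by the coverage job; writes coverage.info
    cd ..
    bash <(curl -s https://codecov.io/bash) -f build/coverage.info   # mirrors after_success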