Compare commits


No commits in common. "master" and "v0.4.1" have entirely different histories.

519 changed files with 35351 additions and 54833 deletions

.github/matchers/gcc-problem-matcher.json

@@ -1,17 +0,0 @@
{
"problemMatcher": [
{
"owner": "gcc-problem-matcher",
"pattern": [
{
"regexp": "^(.*?):(\\d+):(\\d*):?\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}
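
As a quick illustration of the matcher above (the path and message are invented for the example), a GCC diagnostic line such as

main.c:42:15: error: 'foo' undeclared (first use in this function)

is captured by the regexp as file=main.c, line=42, column=15, severity=error, with the remainder as the message.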

.github/matchers/meson-problem-matcher.json

@@ -1,17 +0,0 @@
{
"problemMatcher": [
{
"owner": "meson-problem-matcher",
"pattern": [
{
"regexp": "^(.*?)?:(\\d+)?:(\\d+)?: (WARNING|ERROR):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}

.github/matchers/vala-problem-matcher.json

@@ -1,17 +0,0 @@
{
"problemMatcher": [
{
"owner": "vala-problem-matcher",
"pattern": [
{
"regexp": "^(?:../)?(.*?):(\\d+).(\\d+)-\\d+.\\d+:?\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}
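
The Vala matcher above differs mainly in its location format: valac reports line.column-line.column spans. An invented example such as

conversation.vala:12.5-12.17: warning: unused variable `x'

maps to file=conversation.vala, line=12, column=5, severity=warning.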

@@ -2,42 +2,13 @@ name: Build
on: [pull_request, push]
jobs:
build:
name: "Build"
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: "Checkout sources"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: "Setup matchers"
run: |
echo '::add-matcher::${{ github.workspace }}/.github/matchers/gcc-problem-matcher.json'
echo '::add-matcher::${{ github.workspace }}/.github/matchers/vala-problem-matcher.json'
echo '::add-matcher::${{ github.workspace }}/.github/matchers/meson-problem-matcher.json'
- name: "Setup dependencies"
run: |
sudo apt-get update
sudo apt-get remove libunwind-14-dev
sudo apt-get install -y build-essential gettext libadwaita-1-dev libcanberra-dev libgcrypt20-dev libgee-0.8-dev libgpgme-dev libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev libgtk-4-dev libnice-dev libnotify-dev libqrencode-dev libsignal-protocol-c-dev libsoup-3.0-dev libsqlite3-dev libsrtp2-dev libwebrtc-audio-processing-dev meson valac
- name: "Configure"
run: meson setup build
- name: "Build"
run: meson compile -C build
- name: "Test"
run: meson test -C build
build-flatpak:
name: "Build flatpak"
runs-on: ubuntu-24.04
container:
image: bilelmoussaoui/flatpak-github-actions:gnome-46
options: --privileged
steps:
- name: "Checkout sources"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: "Build"
uses: flathub-infra/flatpak-github-actions/flatpak-builder@master
with:
manifest-path: im.dino.Dino.json
bundle: im.dino.Dino.flatpak
- uses: actions/checkout@v2
- run: sudo apt-get update
- run: sudo apt-get remove libunwind-14-dev
- run: sudo apt-get install -y build-essential gettext cmake valac libgee-0.8-dev libsqlite3-dev libgtk-4-dev libnotify-dev libgpgme-dev libsoup2.4-dev libgcrypt20-dev libqrencode-dev libgspell-1-dev libnice-dev libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libsrtp2-dev libwebrtc-audio-processing-dev libadwaita-1-dev
- run: ./configure --with-tests --with-libsignal-in-tree
- run: make
- run: build/xmpp-vala-test
- run: build/signal-protocol-vala-test

.gitmodules vendored Normal file

@@ -0,0 +1,4 @@
[submodule "libsignal-protocol-c"]
path = plugins/signal-protocol/libsignal-protocol-c
url = https://github.com/WhisperSystems/libsignal-protocol-c.git
branch = v2.3.3

CMakeLists.txt Normal file

@@ -0,0 +1,215 @@
cmake_minimum_required(VERSION 3.3)
list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)
include(ComputeVersion)
if (NOT VERSION_FOUND)
project(Dino LANGUAGES C CXX)
elseif (VERSION_IS_RELEASE)
project(Dino VERSION ${VERSION_FULL} LANGUAGES C CXX)
else ()
project(Dino LANGUAGES C CXX)
set(PROJECT_VERSION ${VERSION_FULL})
endif ()
# Prepare Plugins
set(DEFAULT_PLUGINS omemo;openpgp;http-files;ice;rtp)
foreach (plugin ${DEFAULT_PLUGINS})
if ("$CACHE{DINO_PLUGIN_ENABLED_${plugin}}" STREQUAL "")
if (NOT DEFINED DINO_PLUGIN_ENABLED_${plugin})
set(DINO_PLUGIN_ENABLED_${plugin} "yes" CACHE BOOL "Enable plugin ${plugin}")
else ()
set(DINO_PLUGIN_ENABLED_${plugin} "${DINO_PLUGIN_ENABLED_${plugin}}" CACHE BOOL "Enable plugin ${plugin}" FORCE)
endif ()
if (DINO_PLUGIN_ENABLED_${plugin})
message(STATUS "Enabled plugin: ${plugin}")
else ()
message(STATUS "Disabled plugin: ${plugin}")
endif ()
endif ()
endforeach (plugin)
if (DISABLED_PLUGINS)
foreach(plugin ${DISABLED_PLUGINS})
set(DINO_PLUGIN_ENABLED_${plugin} "no" CACHE BOOL "Enable plugin ${plugin}" FORCE)
message(STATUS "Disabled plugin: ${plugin}")
endforeach(plugin)
endif (DISABLED_PLUGINS)
if (ENABLED_PLUGINS)
foreach(plugin ${ENABLED_PLUGINS})
set(DINO_PLUGIN_ENABLED_${plugin} "yes" CACHE BOOL "Enable plugin ${plugin}" FORCE)
message(STATUS "Enabled plugin: ${plugin}")
endforeach(plugin)
endif (ENABLED_PLUGINS)
set(PLUGINS "")
get_cmake_property(all_variables VARIABLES)
foreach (variable_name ${all_variables})
if (variable_name MATCHES "^DINO_PLUGIN_ENABLED_(.+)$" AND ${variable_name})
list(APPEND PLUGINS ${CMAKE_MATCH_1})
endif()
endforeach ()
list(SORT PLUGINS)
string(REPLACE ";" ", " PLUGINS_TEXT "${PLUGINS}")
message(STATUS "Configuring Dino ${PROJECT_VERSION} with plugins: ${PLUGINS_TEXT}")
# Prepare install paths
macro(set_path what val desc)
if (NOT ${what})
unset(${what} CACHE)
set(${what} ${val})
endif ()
if (NOT "${${what}}" STREQUAL "${_${what}_SET}")
message(STATUS "${desc}: ${${what}}")
set(_${what}_SET ${${what}} CACHE INTERNAL ${desc})
endif()
endmacro(set_path)
string(REGEX REPLACE "^liblib" "lib" LIBDIR_NAME "lib${LIB_SUFFIX}")
set_path(CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}" "Installation directory for architecture-independent files")
set_path(EXEC_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}" "Installation directory for architecture-dependent files")
set_path(SHARE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}/share" "Installation directory for read-only architecture-independent data")
set_path(BIN_INSTALL_DIR "${EXEC_INSTALL_PREFIX}/bin" "Installation directory for user executables")
set_path(DATA_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/dino" "Installation directory for dino-specific data")
set_path(APPDATA_FILE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/metainfo" "Installation directory for .appdata.xml files")
set_path(DESKTOP_FILE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/applications" "Installation directory for .desktop files")
set_path(SERVICE_FILE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/dbus-1/services" "Installation directory for .service files")
set_path(ICON_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/icons" "Installation directory for icons")
set_path(INCLUDE_INSTALL_DIR "${EXEC_INSTALL_PREFIX}/include" "Installation directory for C header files")
set_path(LIB_INSTALL_DIR "${EXEC_INSTALL_PREFIX}/${LIBDIR_NAME}" "Installation directory for object code libraries")
set_path(LOCALE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/locale" "Installation directory for locale files")
set_path(PLUGIN_INSTALL_DIR "${LIB_INSTALL_DIR}/dino/plugins" "Installation directory for dino plugin object code files")
set_path(VAPI_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/vala/vapi" "Installation directory for Vala API files")
set(TARGET_INSTALL LIBRARY DESTINATION ${LIB_INSTALL_DIR} RUNTIME DESTINATION ${BIN_INSTALL_DIR} PUBLIC_HEADER DESTINATION ${INCLUDE_INSTALL_DIR} ARCHIVE DESTINATION ${LIB_INSTALL_DIR})
set(PLUGIN_INSTALL LIBRARY DESTINATION ${PLUGIN_INSTALL_DIR} RUNTIME DESTINATION ${PLUGIN_INSTALL_DIR})
include(CheckCCompilerFlag)
include(CheckCSourceCompiles)
macro(AddCFlagIfSupported list flag)
string(REGEX REPLACE "[^a-z^A-Z^_^0-9]+" "_" flag_name ${flag})
check_c_compiler_flag(${flag} COMPILER_SUPPORTS${flag_name})
if (${COMPILER_SUPPORTS${flag_name}})
set(${list} "${${list}} ${flag}")
endif ()
endmacro()
if ("Ninja" STREQUAL ${CMAKE_GENERATOR})
AddCFlagIfSupported(CMAKE_C_FLAGS -fdiagnostics-color)
endif ()
# Flags for all C files
AddCFlagIfSupported(CMAKE_C_FLAGS -Wall)
AddCFlagIfSupported(CMAKE_C_FLAGS -Wextra)
AddCFlagIfSupported(CMAKE_C_FLAGS -Werror=format-security)
AddCFlagIfSupported(CMAKE_C_FLAGS -Wno-duplicate-decl-specifier)
AddCFlagIfSupported(CMAKE_C_FLAGS -fno-omit-frame-pointer)
if (NOT VALA_WARN)
set(VALA_WARN "conversion")
endif ()
set(VALA_WARN "${VALA_WARN}" CACHE STRING "Which warnings to show when invoking C compiler on Vala compiler output")
set_property(CACHE VALA_WARN PROPERTY STRINGS "all;unused;qualifier;conversion;deprecated;format;none")
# Vala generates some unused stuff
if (NOT ("all" IN_LIST VALA_WARN OR "unused" IN_LIST VALA_WARN))
AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-but-set-variable)
AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-function)
AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-label)
AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-parameter)
AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-value)
AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-variable)
endif ()
if (NOT ("all" IN_LIST VALA_WARN OR "qualifier" IN_LIST VALA_WARN))
AddCFlagIfSupported(VALA_CFLAGS -Wno-discarded-qualifiers)
AddCFlagIfSupported(VALA_CFLAGS -Wno-discarded-array-qualifiers)
AddCFlagIfSupported(VALA_CFLAGS -Wno-incompatible-pointer-types-discards-qualifiers)
endif ()
if (NOT ("all" IN_LIST VALA_WARN OR "deprecated" IN_LIST VALA_WARN))
AddCFlagIfSupported(VALA_CFLAGS -Wno-deprecated-declarations)
endif ()
if (NOT ("all" IN_LIST VALA_WARN OR "format" IN_LIST VALA_WARN))
AddCFlagIfSupported(VALA_CFLAGS -Wno-missing-braces)
endif ()
if (NOT ("all" IN_LIST VALA_WARN OR "conversion" IN_LIST VALA_WARN))
AddCFlagIfSupported(VALA_CFLAGS -Wno-int-conversion)
AddCFlagIfSupported(VALA_CFLAGS -Wno-pointer-sign)
AddCFlagIfSupported(VALA_CFLAGS -Wno-incompatible-pointer-types)
endif ()
try_compile(__WITHOUT_FILE_OFFSET_BITS_64 ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_SOURCE_DIR}/cmake/LargeFileOffsets.c COMPILE_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS})
if (NOT __WITHOUT_FILE_OFFSET_BITS_64)
try_compile(__WITH_FILE_OFFSET_BITS_64 ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_SOURCE_DIR}/cmake/LargeFileOffsets.c COMPILE_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS} -D_FILE_OFFSET_BITS=64)
if (__WITH_FILE_OFFSET_BITS_64)
AddCFlagIfSupported(CMAKE_C_FLAGS -D_FILE_OFFSET_BITS=64)
message(STATUS "Enabled large file support using _FILE_OFFSET_BITS=64")
else (__WITH_FILE_OFFSET_BITS_64)
message(STATUS "Large file support not available")
endif (__WITH_FILE_OFFSET_BITS_64)
unset(__WITH_FILE_OFFSET_BITS_64)
endif (NOT __WITHOUT_FILE_OFFSET_BITS_64)
unset(__WITHOUT_FILE_OFFSET_BITS_64)
if ($ENV{USE_CCACHE})
# Configure CCache if available
find_program(CCACHE_BIN ccache)
mark_as_advanced(CCACHE_BIN)
if (CCACHE_BIN)
message(STATUS "Using ccache")
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_BIN})
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_BIN})
else (CCACHE_BIN)
message(STATUS "USE_CCACHE was set but ccache was not found")
endif (CCACHE_BIN)
endif ($ENV{USE_CCACHE})
if (NOT NO_DEBUG)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g")
set(CMAKE_VALA_FLAGS "${CMAKE_VALA_FLAGS} -g")
endif (NOT NO_DEBUG)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
set(GLib_GLOBAL_VERSION 2.38)
set(ICU_GLOBAL_VERSION 57)
if (NOT VALA_EXECUTABLE)
unset(VALA_EXECUTABLE CACHE)
endif ()
find_package(Vala 0.34 REQUIRED)
if (VALA_VERSION VERSION_GREATER "0.34.90" AND VALA_VERSION VERSION_LESS "0.36.1" OR # Due to a bug on 0.36.0 (and pre-releases), we need to disable FAST_VAPI
VALA_VERSION VERSION_EQUAL "0.44.10" OR VALA_VERSION VERSION_EQUAL "0.46.4" OR VALA_VERSION VERSION_EQUAL "0.47.1" OR # See Dino issue #646
VALA_VERSION VERSION_EQUAL "0.40.21" OR VALA_VERSION VERSION_EQUAL "0.46.8" OR VALA_VERSION VERSION_EQUAL "0.48.4") # See Dino issue #816
set(DISABLE_FAST_VAPI yes)
endif ()
include(${VALA_USE_FILE})
include(MultiFind)
include(GlibCompileResourcesSupport)
find_package(GLib ${GLib_GLOBAL_VERSION} REQUIRED)
string(REGEX REPLACE "^([0-9]+)\\.[0-9]+(\\.[0-9]+)?" "\\1" GLib_MAJOR_VERSION "${GLib_VERSION}")
string(REGEX REPLACE "^[0-9]+\\.([0-9]+)(\\.[0-9]+)?" "\\1" GLib_MINOR_VERSION "${GLib_VERSION}")
math(EXPR GLib_LAST_RELEASE_MINOR_VERSION "${GLib_MINOR_VERSION} / 2 * 2")
set(CMAKE_VALA_FLAGS "${CMAKE_VALA_FLAGS} --target-glib=${GLib_MAJOR_VERSION}.${GLib_LAST_RELEASE_MINOR_VERSION}")
add_subdirectory(qlite)
add_subdirectory(xmpp-vala)
add_subdirectory(libdino)
add_subdirectory(main)
add_subdirectory(crypto-vala)
add_subdirectory(plugins)
# uninstall target
configure_file("${CMAKE_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in" "${CMAKE_BINARY_DIR}/cmake_uninstall.cmake" IMMEDIATE @ONLY)
add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/cmake_uninstall.cmake COMMENT "Uninstall the project...")

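As a rough orientation for the CMakeLists.txt above, a configure-time sketch of the cache switches it handles (plugin and warning-group names are just the ones listed in the file; the exact invocation depends on your setup):

cmake -DDISABLED_PLUGINS="omemo;rtp" ..      # force two of the default plugins off
cmake -DENABLED_PLUGINS=http-files ..        # force a plugin on
cmake -DVALA_WARN="unused;conversion" ..     # show these warning groups for Vala-generated C
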
@@ -1,11 +1,7 @@
<img src="https://dino.im/img/logo.svg" width="80">
![Dino](https://dino.im/img/readme_header.svg)
=======
# Dino
Dino is an XMPP messaging app for Linux using GTK and Vala.
It supports calls, encryption, file transfers, group chats and more.
![screenshot](https://dino.im/img/appdata/screenshot-dino-0.4-main-2244x1644@2.png)
![screenshots](https://dino.im/img/screenshot-main.png)
Installation
------------
@@ -15,9 +11,9 @@ Build
-----
Make sure to install all [dependencies](https://github.com/dino/dino/wiki/Build#dependencies).
meson setup build
meson compile -C build
build/main/dino
./configure
make
build/dino
Resources
---------
@@ -34,8 +30,8 @@ Contribute
License
-------
Dino - XMPP messaging app using GTK/Vala
Copyright (C) 2016-2025 Dino contributors
Dino - Modern Jabber/XMPP Client using GTK+/Vala
Copyright (C) 2016-2023 Dino contributors
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by

cmake/BuildTargetScript.cmake Normal file

@@ -0,0 +1,57 @@
# This file is meant to be invoked at build time. It generates the needed
# resource XML file.
# Input variables that need to be provided when invoking this script:
# GXML_OUTPUT The output file path where to save the XML file.
# GXML_COMPRESS_ALL Sets all COMPRESS flags in all resources in resource
# list.
# GXML_NO_COMPRESS_ALL Removes all COMPRESS flags in all resources in
# resource list.
# GXML_STRIPBLANKS_ALL Sets all STRIPBLANKS flags in all resources in
# resource list.
# GXML_NO_STRIPBLANKS_ALL Removes all STRIPBLANKS flags in all resources in
# resource list.
# GXML_TOPIXDATA_ALL Sets all TOPIXDATA flags in all resources in resource
# list.
# GXML_NO_TOPIXDATA_ALL Removes all TOPIXDATA flags in all resources in
# resource list.
# GXML_PREFIX Overrides the resource prefix that is prepended to
# each relative name in registered resources.
# GXML_RESOURCES The list of resource files. Absolute and relative
# paths are treated the same.
# Include the GENERATE_GXML() function.
include(${CMAKE_CURRENT_LIST_DIR}/GenerateGXML.cmake)
# Set flags to actual invocation flags.
if(GXML_COMPRESS_ALL)
set(GXML_COMPRESS_ALL COMPRESS_ALL)
endif()
if(GXML_NO_COMPRESS_ALL)
set(GXML_NO_COMPRESS_ALL NO_COMPRESS_ALL)
endif()
if(GXML_STRIPBLANKS_ALL)
set(GXML_STRIPBLANKS_ALL STRIPBLANKS_ALL)
endif()
if(GXML_NO_STRIPBLANKS_ALL)
set(GXML_NO_STRIPBLANKS_ALL NO_STRIPBLANKS_ALL)
endif()
if(GXML_TOPIXDATA_ALL)
set(GXML_TOPIXDATA_ALL TOPIXDATA_ALL)
endif()
if(GXML_NO_TOPIXDATA_ALL)
set(GXML_NO_TOPIXDATA_ALL NO_TOPIXDATA_ALL)
endif()
# Replace " " with ";" to import the list over the command line. Otherwise
# CMake would interpret the passed resources as a whole string.
string(REPLACE " " ";" GXML_RESOURCES ${GXML_RESOURCES})
# Invoke the gresource XML generation function.
generate_gxml(${GXML_OUTPUT}
${GXML_COMPRESS_ALL} ${GXML_NO_COMPRESS_ALL}
${GXML_STRIPBLANKS_ALL} ${GXML_NO_STRIPBLANKS_ALL}
${GXML_TOPIXDATA_ALL} ${GXML_NO_TOPIXDATA_ALL}
PREFIX ${GXML_PREFIX}
RESOURCES ${GXML_RESOURCES})

cmake/GlibCompileResourcesSupport.cmake Normal file

@@ -0,0 +1,221 @@
include(CMakeParseArguments)
# Path to this file.
set(GCR_CMAKE_MACRO_DIR ${CMAKE_CURRENT_LIST_DIR})
# Compiles a gresource resource file from given resource files. Automatically
# creates the XML controlling file.
# The type of resource to generate (header, c-file or bundle) is automatically
# determined from TARGET file ending, if no TYPE is explicitly specified.
# The output file is stored in the provided variable "output".
# "xml_out" contains the variable where to output the XML path. Can be used to
# create custom targets or doing postprocessing.
# If you want to use preprocessing, you need to manually check the existence
# of the tools you use. This function doesn't check this for you, it just
# generates the XML file. glib-compile-resources will then throw a
# warning/error.
function(COMPILE_GRESOURCES output xml_out)
# Available options:
# COMPRESS_ALL, NO_COMPRESS_ALL Overrides the COMPRESS flag in all
# registered resources.
# STRIPBLANKS_ALL, NO_STRIPBLANKS_ALL Overrides the STRIPBLANKS flag in all
# registered resources.
# TOPIXDATA_ALL, NO_TOPIXDATA_ALL Overrides the TOPIXDATA flag in all
# registered resources.
set(CG_OPTIONS COMPRESS_ALL NO_COMPRESS_ALL
STRIPBLANKS_ALL NO_STRIPBLANKS_ALL
TOPIXDATA_ALL NO_TOPIXDATA_ALL)
# Available one value options:
# TYPE Type of resource to create. Valid options are:
# EMBED_C: A C-file that can be compiled with your project.
# EMBED_H: A header that can be included into your project.
# BUNDLE: Generates a resource bundle file that can be loaded
# at runtime.
# AUTO: Determine from target file ending. Need to specify
# target argument.
# PREFIX Overrides the resource prefix that is prepended to each
# relative file name in registered resources.
# SOURCE_DIR Overrides the resources base directory to search for resources.
# Normally this is the source directory from which CMake
# was invoked (CMAKE_SOURCE_DIR).
# TARGET Overrides the name of the output file(s). Normally the output
# name from the glib-compile-resources tool is used.
set(CG_ONEVALUEARGS TYPE PREFIX SOURCE_DIR TARGET)
# Available multi-value options:
# RESOURCES The list of resource files. Absolute and relative paths are
# treated the same; absolute paths are stripped down to relative ones.
# If the absolute path is not inside the given base directory SOURCE_DIR
# or CMAKE_SOURCE_DIR (if SOURCE_DIR is not overridden), this
# function aborts.
# OPTIONS Extra command line options passed to glib-compile-resources.
set(CG_MULTIVALUEARGS RESOURCES OPTIONS)
# Parse the arguments.
cmake_parse_arguments(CG_ARG
"${CG_OPTIONS}"
"${CG_ONEVALUEARGS}"
"${CG_MULTIVALUEARGS}"
"${ARGN}")
# Variable to store the double-quote (") string. Since escaping
# double-quotes in strings is not possible we need a helper variable that
# does this job for us.
set(Q \")
# Check invocation validity with the <prefix>_UNPARSED_ARGUMENTS variable.
# If other not recognized parameters were passed, throw error.
if (CG_ARG_UNPARSED_ARGUMENTS)
set(CG_WARNMSG "Invocation of COMPILE_GRESOURCES with unrecognized")
set(CG_WARNMSG "${CG_WARNMSG} parameters. Parameters are:")
set(CG_WARNMSG "${CG_WARNMSG} ${CG_ARG_UNPARSED_ARGUMENTS}.")
message(WARNING ${CG_WARNMSG})
endif()
# Check invocation validity depending on generation mode (EMBED_C, EMBED_H
# or BUNDLE).
if ("${CG_ARG_TYPE}" STREQUAL "EMBED_C")
# EMBED_C mode, output compilable C-file.
set(CG_GENERATE_COMMAND_LINE "--generate-source")
set(CG_TARGET_FILE_ENDING "c")
elseif ("${CG_ARG_TYPE}" STREQUAL "EMBED_H")
# EMBED_H mode, output includable header file.
set(CG_GENERATE_COMMAND_LINE "--generate-header")
set(CG_TARGET_FILE_ENDING "h")
elseif ("${CG_ARG_TYPE}" STREQUAL "BUNDLE")
# BUNDLE mode, output resource bundle. Don't do anything since
# glib-compile-resources outputs a bundle when not specifying
# something else.
set(CG_TARGET_FILE_ENDING "gresource")
else()
# Everything else is AUTO mode, determine from target file ending.
if (CG_ARG_TARGET)
set(CG_GENERATE_COMMAND_LINE "--generate")
else()
set(CG_ERRMSG "AUTO mode given, but no target specified. Can't")
set(CG_ERRMSG "${CG_ERRMSG} determine output type. In function")
set(CG_ERRMSG "${CG_ERRMSG} COMPILE_GRESOURCES.")
message(FATAL_ERROR ${CG_ERRMSG})
endif()
endif()
# Check flag validity.
if (CG_ARG_COMPRESS_ALL AND CG_ARG_NO_COMPRESS_ALL)
set(CG_ERRMSG "COMPRESS_ALL and NO_COMPRESS_ALL simultaneously set. In")
set(CG_ERRMSG "${CG_ERRMSG} function COMPILE_GRESOURCES.")
message(FATAL_ERROR ${CG_ERRMSG})
endif()
if (CG_ARG_STRIPBLANKS_ALL AND CG_ARG_NO_STRIPBLANKS_ALL)
set(CG_ERRMSG "STRIPBLANKS_ALL and NO_STRIPBLANKS_ALL simultaneously")
set(CG_ERRMSG "${CG_ERRMSG} set. In function COMPILE_GRESOURCES.")
message(FATAL_ERROR ${CG_ERRMSG})
endif()
if (CG_ARG_TOPIXDATA_ALL AND CG_ARG_NO_TOPIXDATA_ALL)
set(CG_ERRMSG "TOPIXDATA_ALL and NO_TOPIXDATA_ALL simultaneously set.")
set(CG_ERRMSG "${CG_ERRMSG} In function COMPILE_GRESOURCES.")
message(FATAL_ERROR ${CG_ERRMSG})
endif()
# Check if there are any resources.
if (NOT CG_ARG_RESOURCES)
set(CG_ERRMSG "No resource files to process. In function")
set(CG_ERRMSG "${CG_ERRMSG} COMPILE_GRESOURCES.")
message(FATAL_ERROR ${CG_ERRMSG})
endif()
# Extract all dependencies for targets from resource list.
foreach(res ${CG_ARG_RESOURCES})
if (NOT(("${res}" STREQUAL "COMPRESS") OR
("${res}" STREQUAL "STRIPBLANKS") OR
("${res}" STREQUAL "TOPIXDATA")))
add_custom_command(
OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/resources/${res}"
COMMAND ${CMAKE_COMMAND} -E copy "${CG_ARG_SOURCE_DIR}/${res}" "${CMAKE_CURRENT_BINARY_DIR}/resources/${res}"
MAIN_DEPENDENCY "${CG_ARG_SOURCE_DIR}/${res}")
list(APPEND CG_RESOURCES_DEPENDENCIES "${CMAKE_CURRENT_BINARY_DIR}/resources/${res}")
endif()
endforeach()
# Construct .gresource.xml path.
set(CG_XML_FILE_PATH "${CMAKE_CURRENT_BINARY_DIR}/resources/.gresource.xml")
# Generate gresources XML target.
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_OUTPUT=${Q}${CG_XML_FILE_PATH}${Q}")
if(CG_ARG_COMPRESS_ALL)
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_COMPRESS_ALL")
endif()
if(CG_ARG_NO_COMPRESS_ALL)
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_NO_COMPRESS_ALL")
endif()
if(CG_ARG_STRIPBLANKS_ALL)
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_STRIPBLANKS_ALL")
endif()
if(CG_ARG_NO_STRIPBLANKS_ALL)
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_NO_STRIPBLANKS_ALL")
endif()
if(CG_ARG_TOPIXDATA_ALL)
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_TOPIXDATA_ALL")
endif()
if(CG_ARG_NO_TOPIXDATA_ALL)
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_NO_TOPIXDATA_ALL")
endif()
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_PREFIX=${Q}${CG_ARG_PREFIX}${Q}")
list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
list(APPEND CG_CMAKE_SCRIPT_ARGS
"GXML_RESOURCES=${Q}${CG_ARG_RESOURCES}${Q}")
list(APPEND CG_CMAKE_SCRIPT_ARGS "-P")
list(APPEND CG_CMAKE_SCRIPT_ARGS
"${Q}${GCR_CMAKE_MACRO_DIR}/BuildTargetScript.cmake${Q}")
get_filename_component(CG_XML_FILE_PATH_ONLY_NAME
"${CG_XML_FILE_PATH}" NAME)
set(CG_XML_CUSTOM_COMMAND_COMMENT
"Creating gresources XML file (${CG_XML_FILE_PATH_ONLY_NAME})")
add_custom_command(OUTPUT ${CG_XML_FILE_PATH}
COMMAND ${CMAKE_COMMAND}
ARGS ${CG_CMAKE_SCRIPT_ARGS}
DEPENDS ${CG_RESOURCES_DEPENDENCIES}
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT ${CG_XML_CUSTOM_COMMAND_COMMENT})
# Create target manually if not set (to make sure glib-compile-resources
# doesn't change behaviour with its naming standards).
if (NOT CG_ARG_TARGET)
set(CG_ARG_TARGET "${CMAKE_CURRENT_BINARY_DIR}/resources")
set(CG_ARG_TARGET "${CG_ARG_TARGET}.${CG_TARGET_FILE_ENDING}")
endif()
# Create source directory automatically if not set.
if (NOT CG_ARG_SOURCE_DIR)
set(CG_ARG_SOURCE_DIR "${CMAKE_SOURCE_DIR}")
endif()
# Add compilation target for resources.
add_custom_command(OUTPUT ${CG_ARG_TARGET}
COMMAND ${GLIB_COMPILE_RESOURCES_EXECUTABLE}
ARGS
${OPTIONS}
"--target=${Q}${CG_ARG_TARGET}${Q}"
"--sourcedir=${Q}${CG_ARG_SOURCE_DIR}${Q}"
${CG_GENERATE_COMMAND_LINE}
${CG_XML_FILE_PATH}
MAIN_DEPENDENCY ${CG_XML_FILE_PATH}
DEPENDS ${CG_RESOURCES_DEPENDENCIES}
WORKING_DIRECTORY ${CMAKE_BUILD_DIR})
# Set output and XML_OUT to parent scope.
set(${xml_out} ${CG_XML_FILE_PATH} PARENT_SCOPE)
set(${output} ${CG_ARG_TARGET} PARENT_SCOPE)
endfunction()

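The function above documents its options, but no call site is part of this diff. A minimal usage sketch (prefix, file names and target name are illustrative assumptions, not taken from the repository):

compile_gresources(RESOURCE_FILE XML_OUT
    TYPE EMBED_C
    PREFIX /im/dino/Dino
    SOURCE_DIR "${CMAKE_SOURCE_DIR}/main/data"
    RESOURCES icons/dino.svg menu_app.ui)
# RESOURCE_FILE now holds the generated C file, XML_OUT the generated .gresource.xml path.
add_custom_target(dino-resources DEPENDS ${RESOURCE_FILE})
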
cmake/ComputeVersion.cmake Normal file

@@ -0,0 +1,105 @@
include(CMakeParseArguments)
function(_compute_version_from_file)
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${CMAKE_SOURCE_DIR}/VERSION)
if (NOT EXISTS ${CMAKE_SOURCE_DIR}/VERSION)
set(VERSION_FOUND 0 PARENT_SCOPE)
return()
endif ()
file(STRINGS ${CMAKE_SOURCE_DIR}/VERSION VERSION_FILE)
string(REPLACE " " ";" VERSION_FILE "${VERSION_FILE}")
cmake_parse_arguments(VERSION_FILE "" "RELEASE;PRERELEASE" "" ${VERSION_FILE})
if (DEFINED VERSION_FILE_RELEASE)
string(STRIP "${VERSION_FILE_RELEASE}" VERSION_FILE_RELEASE)
set(VERSION_IS_RELEASE 1 PARENT_SCOPE)
set(VERSION_FULL "${VERSION_FILE_RELEASE}" PARENT_SCOPE)
set(VERSION_FOUND 1 PARENT_SCOPE)
elseif (DEFINED VERSION_FILE_PRERELEASE)
string(STRIP "${VERSION_FILE_PRERELEASE}" VERSION_FILE_PRERELEASE)
set(VERSION_IS_RELEASE 0 PARENT_SCOPE)
set(VERSION_FULL "${VERSION_FILE_PRERELEASE}" PARENT_SCOPE)
set(VERSION_FOUND 1 PARENT_SCOPE)
else ()
set(VERSION_FOUND 0 PARENT_SCOPE)
endif ()
endfunction(_compute_version_from_file)
function(_compute_version_from_git)
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${CMAKE_SOURCE_DIR}/.git)
if (NOT GIT_EXECUTABLE)
find_package(Git QUIET)
if (NOT GIT_FOUND)
return()
endif ()
endif (NOT GIT_EXECUTABLE)
# Git tag
execute_process(
COMMAND "${GIT_EXECUTABLE}" describe --tags --abbrev=0
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
RESULT_VARIABLE git_result
OUTPUT_VARIABLE git_tag
ERROR_VARIABLE git_error
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_STRIP_TRAILING_WHITESPACE
)
if (NOT git_result EQUAL 0)
return()
endif (NOT git_result EQUAL 0)
if (git_tag MATCHES "^v?([0-9]+[.]?[0-9]*[.]?[0-9]*)(-[.0-9A-Za-z-]+)?([+][.0-9A-Za-z-]+)?$")
set(VERSION_LAST_RELEASE "${CMAKE_MATCH_1}")
else ()
return()
endif ()
# Git describe
execute_process(
COMMAND "${GIT_EXECUTABLE}" describe --tags
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
RESULT_VARIABLE git_result
OUTPUT_VARIABLE git_describe
ERROR_VARIABLE git_error
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_STRIP_TRAILING_WHITESPACE
)
if (NOT git_result EQUAL 0)
return()
endif (NOT git_result EQUAL 0)
if ("${git_tag}" STREQUAL "${git_describe}")
set(VERSION_IS_RELEASE 1)
else ()
set(VERSION_IS_RELEASE 0)
if (git_describe MATCHES "-([0-9]+)-g([0-9a-f]+)$")
set(VERSION_TAG_OFFSET "${CMAKE_MATCH_1}")
set(VERSION_COMMIT_HASH "${CMAKE_MATCH_2}")
endif ()
execute_process(
COMMAND "${GIT_EXECUTABLE}" show --format=%cd --date=format:%Y%m%d -s
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
RESULT_VARIABLE git_result
OUTPUT_VARIABLE git_time
ERROR_VARIABLE git_error
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_STRIP_TRAILING_WHITESPACE
)
if (NOT git_result EQUAL 0)
return()
endif (NOT git_result EQUAL 0)
set(VERSION_COMMIT_DATE "${git_time}")
endif ()
if (NOT VERSION_IS_RELEASE)
set(VERSION_SUFFIX "~git${VERSION_TAG_OFFSET}.${VERSION_COMMIT_DATE}.${VERSION_COMMIT_HASH}")
else (NOT VERSION_IS_RELEASE)
set(VERSION_SUFFIX "")
endif (NOT VERSION_IS_RELEASE)
set(VERSION_IS_RELEASE ${VERSION_IS_RELEASE} PARENT_SCOPE)
set(VERSION_FULL "${VERSION_LAST_RELEASE}${VERSION_SUFFIX}" PARENT_SCOPE)
set(VERSION_FOUND 1 PARENT_SCOPE)
endfunction(_compute_version_from_git)
_compute_version_from_file()
if (NOT VERSION_FOUND)
_compute_version_from_git()
endif (NOT VERSION_FOUND)

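The VERSION file read by _compute_version_from_file() is not shown in this diff; judging from the parsing above it is a single "RELEASE x.y.z" or "PRERELEASE x.y.z" line. A self-contained sketch of that parsing (0.4.1 is simply the tag being compared here):

set(line "RELEASE 0.4.1")                                       # hypothetical VERSION file content
string(REPLACE " " ";" line "${line}")                          # -> "RELEASE;0.4.1"
cmake_parse_arguments(VERSION_FILE "" "RELEASE;PRERELEASE" "" ${line})
message(STATUS "version: ${VERSION_FILE_RELEASE}")              # prints "version: 0.4.1"
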
cmake/FindATK.cmake Normal file

@@ -0,0 +1,31 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(ATK
PKG_CONFIG_NAME atk
LIB_NAMES atk-1.0
INCLUDE_NAMES atk/atk.h
INCLUDE_DIR_SUFFIXES atk-1.0 atk-1.0/include
DEPENDS GObject
)
if(ATK_FOUND AND NOT ATK_VERSION)
find_file(ATK_VERSION_HEADER "atk/atkversion.h" HINTS ${ATK_INCLUDE_DIRS})
mark_as_advanced(ATK_VERSION_HEADER)
if(ATK_VERSION_HEADER)
file(STRINGS "${ATK_VERSION_HEADER}" ATK_MAJOR_VERSION REGEX "^#define ATK_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define ATK_MAJOR_VERSION \\(?([0-9]+)\\)?$" "\\1" ATK_MAJOR_VERSION "${ATK_MAJOR_VERSION}")
file(STRINGS "${ATK_VERSION_HEADER}" ATK_MINOR_VERSION REGEX "^#define ATK_MINOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define ATK_MINOR_VERSION \\(?([0-9]+)\\)?$" "\\1" ATK_MINOR_VERSION "${ATK_MINOR_VERSION}")
file(STRINGS "${ATK_VERSION_HEADER}" ATK_MICRO_VERSION REGEX "^#define ATK_MICRO_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define ATK_MICRO_VERSION \\(?([0-9]+)\\)?$" "\\1" ATK_MICRO_VERSION "${ATK_MICRO_VERSION}")
set(ATK_VERSION "${ATK_MAJOR_VERSION}.${ATK_MINOR_VERSION}.${ATK_MICRO_VERSION}")
unset(ATK_MAJOR_VERSION)
unset(ATK_MINOR_VERSION)
unset(ATK_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(ATK
REQUIRED_VARS ATK_LIBRARY
VERSION_VAR ATK_VERSION)

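The Find*.cmake modules that follow all use the same pkg-config-with-fallback pattern, so one hedged consumer sketch covers them (the message is illustrative; the variables are the ones these modules set):

list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)   # as done in CMakeLists.txt above
find_package(ATK REQUIRED)
message(STATUS "ATK ${ATK_VERSION} -> ${ATK_LIBRARY}")
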
cmake/FindAdwaita.cmake Normal file

@@ -0,0 +1,11 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Adwaita
PKG_CONFIG_NAME libadwaita-1
LIB_NAMES libadwaita-1
INCLUDE_NAMES adwaita.h
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Adwaita
REQUIRED_VARS Adwaita_LIBRARY
VERSION_VAR Adwaita_VERSION)

cmake/FindCairo.cmake Normal file

@@ -0,0 +1,30 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Cairo
PKG_CONFIG_NAME cairo
LIB_NAMES cairo
INCLUDE_NAMES cairo.h
INCLUDE_DIR_SUFFIXES cairo cairo/include
)
if(Cairo_FOUND AND NOT Cairo_VERSION)
find_file(Cairo_VERSION_HEADER "cairo-version.h" HINTS ${Cairo_INCLUDE_DIRS})
mark_as_advanced(Cairo_VERSION_HEADER)
if(Cairo_VERSION_HEADER)
file(STRINGS "${Cairo_VERSION_HEADER}" Cairo_MAJOR_VERSION REGEX "^#define CAIRO_VERSION_MAJOR +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define CAIRO_VERSION_MAJOR \\(?([0-9]+)\\)?$" "\\1" Cairo_MAJOR_VERSION "${Cairo_MAJOR_VERSION}")
file(STRINGS "${Cairo_VERSION_HEADER}" Cairo_MINOR_VERSION REGEX "^#define CAIRO_VERSION_MINOR +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define CAIRO_VERSION_MINOR \\(?([0-9]+)\\)?$" "\\1" Cairo_MINOR_VERSION "${Cairo_MINOR_VERSION}")
file(STRINGS "${Cairo_VERSION_HEADER}" Cairo_MICRO_VERSION REGEX "^#define CAIRO_VERSION_MICRO +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define CAIRO_VERSION_MICRO \\(?([0-9]+)\\)?$" "\\1" Cairo_MICRO_VERSION "${Cairo_MICRO_VERSION}")
set(Cairo_VERSION "${Cairo_MAJOR_VERSION}.${Cairo_MINOR_VERSION}.${Cairo_MICRO_VERSION}")
unset(Cairo_MAJOR_VERSION)
unset(Cairo_MINOR_VERSION)
unset(Cairo_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Cairo
REQUIRED_VARS Cairo_LIBRARY
VERSION_VAR Cairo_VERSION)

cmake/FindCanberra.cmake Normal file

@@ -0,0 +1,10 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Canberra
PKG_CONFIG_NAME libcanberra
LIB_NAMES canberra
INCLUDE_NAMES canberra.h
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Canberra
REQUIRED_VARS Canberra_LIBRARY)

cmake/FindGCrypt.cmake Normal file

@@ -0,0 +1,10 @@
include(PkgConfigWithFallbackOnConfigScript)
find_pkg_config_with_fallback_on_config_script(GCrypt
PKG_CONFIG_NAME libgcrypt
CONFIG_SCRIPT_NAME libgcrypt
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GCrypt
REQUIRED_VARS GCrypt_LIBRARY
VERSION_VAR GCrypt_VERSION)

cmake/FindGDK3.cmake Normal file

@@ -0,0 +1,38 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GDK3
PKG_CONFIG_NAME gdk-3.0
LIB_NAMES gdk-3
INCLUDE_NAMES gdk/gdk.h
INCLUDE_DIR_SUFFIXES gtk-3.0 gtk-3.0/include gtk+-3.0 gtk+-3.0/include
DEPENDS Pango Cairo GDKPixbuf2
)
if(GDK3_FOUND AND NOT GDK3_VERSION)
find_file(GDK3_VERSION_HEADER "gdk/gdkversionmacros.h" HINTS ${GDK3_INCLUDE_DIRS})
mark_as_advanced(GDK3_VERSION_HEADER)
if(GDK3_VERSION_HEADER)
file(STRINGS "${GDK3_VERSION_HEADER}" GDK3_MAJOR_VERSION REGEX "^#define GDK_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GDK_MAJOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GDK3_MAJOR_VERSION "${GDK3_MAJOR_VERSION}")
file(STRINGS "${GDK3_VERSION_HEADER}" GDK3_MINOR_VERSION REGEX "^#define GDK_MINOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GDK_MINOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GDK3_MINOR_VERSION "${GDK3_MINOR_VERSION}")
file(STRINGS "${GDK3_VERSION_HEADER}" GDK3_MICRO_VERSION REGEX "^#define GDK_MICRO_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GDK_MICRO_VERSION \\(?([0-9]+)\\)?$" "\\1" GDK3_MICRO_VERSION "${GDK3_MICRO_VERSION}")
set(GDK3_VERSION "${GDK3_MAJOR_VERSION}.${GDK3_MINOR_VERSION}.${GDK3_MICRO_VERSION}")
unset(GDK3_MAJOR_VERSION)
unset(GDK3_MINOR_VERSION)
unset(GDK3_MICRO_VERSION)
endif()
endif()
if (GDK3_FOUND)
find_file(GDK3_WITH_X11 "gdk/gdkx.h" HINTS ${GDK3_INCLUDE_DIRS})
if (GDK3_WITH_X11)
set(GDK3_WITH_X11 yes CACHE INTERNAL "Does GDK3 support X11")
endif (GDK3_WITH_X11)
endif ()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GDK3
REQUIRED_VARS GDK3_LIBRARY
VERSION_VAR GDK3_VERSION)

cmake/FindGDK4.cmake Normal file

@@ -0,0 +1,38 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GDK4
PKG_CONFIG_NAME gdk-4.0
LIB_NAMES gdk-4
INCLUDE_NAMES gdk/gdk.h
INCLUDE_DIR_SUFFIXES gtk-4.0 gtk-4.0/include gtk+-4.0 gtk+-4.0/include
DEPENDS Pango Cairo GDKPixbuf2
)
if(GDK4_FOUND AND NOT GDK4_VERSION)
find_file(GDK4_VERSION_HEADER "gdk/gdkversionmacros.h" HINTS ${GDK4_INCLUDE_DIRS})
mark_as_advanced(GDK4_VERSION_HEADER)
if(GDK4_VERSION_HEADER)
file(STRINGS "${GDK4_VERSION_HEADER}" GDK4_MAJOR_VERSION REGEX "^#define GDK_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GDK_MAJOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GDK4_MAJOR_VERSION "${GDK4_MAJOR_VERSION}")
file(STRINGS "${GDK4_VERSION_HEADER}" GDK4_MINOR_VERSION REGEX "^#define GDK_MINOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GDK_MINOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GDK4_MINOR_VERSION "${GDK4_MINOR_VERSION}")
file(STRINGS "${GDK4_VERSION_HEADER}" GDK4_MICRO_VERSION REGEX "^#define GDK_MICRO_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GDK_MICRO_VERSION \\(?([0-9]+)\\)?$" "\\1" GDK4_MICRO_VERSION "${GDK4_MICRO_VERSION}")
set(GDK4_VERSION "${GDK4_MAJOR_VERSION}.${GDK4_MINOR_VERSION}.${GDK4_MICRO_VERSION}")
unset(GDK4_MAJOR_VERSION)
unset(GDK4_MINOR_VERSION)
unset(GDK4_MICRO_VERSION)
endif()
endif()
if (GDK4_FOUND)
find_file(GDK4_WITH_X11 "gdk/gdkx.h" HINTS ${GDK4_INCLUDE_DIRS})
if (GDK4_WITH_X11)
set(GDK4_WITH_X11 yes CACHE INTERNAL "Does GDK4 support X11")
endif (GDK4_WITH_X11)
endif ()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GDK4
REQUIRED_VARS GDK4_LIBRARY
VERSION_VAR GDK4_VERSION)

cmake/FindGDKPixbuf2.cmake Normal file

@@ -0,0 +1,23 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GDKPixbuf2
PKG_CONFIG_NAME gdk-pixbuf-2.0
LIB_NAMES gdk_pixbuf-2.0
INCLUDE_NAMES gdk-pixbuf/gdk-pixbuf.h
INCLUDE_DIR_SUFFIXES gdk-pixbuf-2.0 gdk-pixbuf-2.0/include
DEPENDS GLib
)
if(GDKPixbuf2_FOUND AND NOT GDKPixbuf2_VERSION)
find_file(GDKPixbuf2_FEATURES_HEADER "gdk-pixbuf/gdk-pixbuf-features.h" HINTS ${GDKPixbuf2_INCLUDE_DIRS})
mark_as_advanced(GDKPixbuf2_FEATURES_HEADER)
if(GDKPixbuf2_FEATURES_HEADER)
file(STRINGS "${GDKPixbuf2_FEATURES_HEADER}" GDKPixbuf2_VERSION REGEX "^#define GDK_PIXBUF_VERSION \\\"[^\\\"]+\\\"")
string(REGEX REPLACE "^#define GDK_PIXBUF_VERSION \\\"([0-9]+)\\.([0-9]+)\\.([0-9]+)\\\"$" "\\1.\\2.\\3" GDKPixbuf2_VERSION "${GDKPixbuf2_VERSION}")
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GDKPixbuf2
REQUIRED_VARS GDKPixbuf2_LIBRARY
VERSION_VAR GDKPixbuf2_VERSION)

cmake/FindGIO.cmake Normal file

@@ -0,0 +1,18 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GIO
PKG_CONFIG_NAME gio-2.0
LIB_NAMES gio-2.0
INCLUDE_NAMES gio/gio.h
INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
DEPENDS GObject
)
if(GIO_FOUND AND NOT GIO_VERSION)
find_package(GLib ${GLib_GLOBAL_VERSION})
set(GIO_VERSION ${GLib_VERSION})
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GIO
REQUIRED_VARS GIO_LIBRARY
VERSION_VAR GIO_VERSION)

cmake/FindGLib.cmake Normal file

@@ -0,0 +1,32 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GLib
PKG_CONFIG_NAME glib-2.0
LIB_NAMES glib-2.0
INCLUDE_NAMES glib.h glibconfig.h
INCLUDE_DIR_HINTS ${CMAKE_LIBRARY_PATH} ${CMAKE_SYSTEM_LIBRARY_PATH}
INCLUDE_DIR_PATHS ${CMAKE_PREFIX_PATH}/lib64 ${CMAKE_PREFIX_PATH}/lib
INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
)
if(GLib_FOUND AND NOT GLib_VERSION)
find_file(GLib_CONFIG_HEADER "glibconfig.h" HINTS ${GLib_INCLUDE_DIRS})
mark_as_advanced(GLib_CONFIG_HEADER)
if(GLib_CONFIG_HEADER)
file(STRINGS "${GLib_CONFIG_HEADER}" GLib_MAJOR_VERSION REGEX "^#define GLIB_MAJOR_VERSION +([0-9]+)")
string(REGEX REPLACE "^#define GLIB_MAJOR_VERSION ([0-9]+)$" "\\1" GLib_MAJOR_VERSION "${GLib_MAJOR_VERSION}")
file(STRINGS "${GLib_CONFIG_HEADER}" GLib_MINOR_VERSION REGEX "^#define GLIB_MINOR_VERSION +([0-9]+)")
string(REGEX REPLACE "^#define GLIB_MINOR_VERSION ([0-9]+)$" "\\1" GLib_MINOR_VERSION "${GLib_MINOR_VERSION}")
file(STRINGS "${GLib_CONFIG_HEADER}" GLib_MICRO_VERSION REGEX "^#define GLIB_MICRO_VERSION +([0-9]+)")
string(REGEX REPLACE "^#define GLIB_MICRO_VERSION ([0-9]+)$" "\\1" GLib_MICRO_VERSION "${GLib_MICRO_VERSION}")
set(GLib_VERSION "${GLib_MAJOR_VERSION}.${GLib_MINOR_VERSION}.${GLib_MICRO_VERSION}")
unset(GLib_MAJOR_VERSION)
unset(GLib_MINOR_VERSION)
unset(GLib_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLib
REQUIRED_VARS GLib_LIBRARY
VERSION_VAR GLib_VERSION)

cmake/FindGModule.cmake Normal file

@@ -0,0 +1,19 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GModule
PKG_CONFIG_NAME gmodule-2.0
LIB_NAMES gmodule-2.0
INCLUDE_NAMES gmodule.h
INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
DEPENDS GLib
)
if(GModule_FOUND AND NOT GModule_VERSION)
# TODO
find_package(GLib ${GLib_GLOBAL_VERSION})
set(GModule_VERSION ${GLib_VERSION})
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GModule
REQUIRED_VARS GModule_LIBRARY
VERSION_VAR GModule_VERSION)

cmake/FindGObject.cmake Normal file

@@ -0,0 +1,19 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GObject
PKG_CONFIG_NAME gobject-2.0
LIB_NAMES gobject-2.0
INCLUDE_NAMES gobject/gobject.h
INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
DEPENDS GLib
)
if(GObject_FOUND AND NOT GObject_VERSION)
# TODO
find_package(GLib ${GLib_GLOBAL_VERSION})
set(GObject_VERSION ${GLib_VERSION})
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GObject
REQUIRED_VARS GObject_LIBRARY
VERSION_VAR GObject_VERSION)

cmake/FindGPGME.cmake Normal file

@@ -0,0 +1,10 @@
include(PkgConfigWithFallbackOnConfigScript)
find_pkg_config_with_fallback_on_config_script(GPGME
PKG_CONFIG_NAME gpgme
CONFIG_SCRIPT_NAME gpgme
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GPGME
REQUIRED_VARS GPGME_LIBRARY
VERSION_VAR GPGME_VERSION)

cmake/FindGTK3.cmake Normal file

@@ -0,0 +1,31 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GTK3
PKG_CONFIG_NAME gtk+-3.0
LIB_NAMES gtk-3
INCLUDE_NAMES gtk/gtk.h
INCLUDE_DIR_SUFFIXES gtk-3.0 gtk-3.0/include gtk+-3.0 gtk+-3.0/include
DEPENDS GDK3 ATK
)
if(GTK3_FOUND AND NOT GTK3_VERSION)
find_file(GTK3_VERSION_HEADER "gtk/gtkversion.h" HINTS ${GTK3_INCLUDE_DIRS})
mark_as_advanced(GTK3_VERSION_HEADER)
if(GTK3_VERSION_HEADER)
file(STRINGS "${GTK3_VERSION_HEADER}" GTK3_MAJOR_VERSION REGEX "^#define GTK_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GTK_MAJOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GTK3_MAJOR_VERSION "${GTK3_MAJOR_VERSION}")
file(STRINGS "${GTK3_VERSION_HEADER}" GTK3_MINOR_VERSION REGEX "^#define GTK_MINOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GTK_MINOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GTK3_MINOR_VERSION "${GTK3_MINOR_VERSION}")
file(STRINGS "${GTK3_VERSION_HEADER}" GTK3_MICRO_VERSION REGEX "^#define GTK_MICRO_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GTK_MICRO_VERSION \\(?([0-9]+)\\)?$" "\\1" GTK3_MICRO_VERSION "${GTK3_MICRO_VERSION}")
set(GTK3_VERSION "${GTK3_MAJOR_VERSION}.${GTK3_MINOR_VERSION}.${GTK3_MICRO_VERSION}")
unset(GTK3_MAJOR_VERSION)
unset(GTK3_MINOR_VERSION)
unset(GTK3_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GTK3
REQUIRED_VARS GTK3_LIBRARY
VERSION_VAR GTK3_VERSION)

cmake/FindGTK4.cmake Normal file

@@ -0,0 +1,30 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GTK4
PKG_CONFIG_NAME gtk4
LIB_NAMES gtk-4
INCLUDE_NAMES gtk/gtk.h
INCLUDE_DIR_SUFFIXES gtk-4.0 gtk-4.0/include gtk+-4.0 gtk+-4.0/include gtk4 gtk4/include
)
if(GTK4_FOUND AND NOT GTK4_VERSION)
find_file(GTK4_VERSION_HEADER "gtk/gtkversion.h" HINTS ${GTK4_INCLUDE_DIRS})
mark_as_advanced(GTK4_VERSION_HEADER)
if(GTK4_VERSION_HEADER)
file(STRINGS "${GTK4_VERSION_HEADER}" GTK4_MAJOR_VERSION REGEX "^#define GTK_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GTK_MAJOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GTK4_MAJOR_VERSION "${GTK4_MAJOR_VERSION}")
file(STRINGS "${GTK4_VERSION_HEADER}" GTK4_MINOR_VERSION REGEX "^#define GTK_MINOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GTK_MINOR_VERSION \\(?([0-9]+)\\)?$" "\\1" GTK4_MINOR_VERSION "${GTK4_MINOR_VERSION}")
file(STRINGS "${GTK4_VERSION_HEADER}" GTK4_MICRO_VERSION REGEX "^#define GTK_MICRO_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define GTK_MICRO_VERSION \\(?([0-9]+)\\)?$" "\\1" GTK4_MICRO_VERSION "${GTK4_MICRO_VERSION}")
set(GTK4_VERSION "${GTK4_MAJOR_VERSION}.${GTK4_MINOR_VERSION}.${GTK4_MICRO_VERSION}")
unset(GTK4_MAJOR_VERSION)
unset(GTK4_MINOR_VERSION)
unset(GTK4_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GTK4
REQUIRED_VARS GTK4_LIBRARY
VERSION_VAR GTK4_VERSION)

cmake/FindGee.cmake Normal file

@@ -0,0 +1,13 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Gee
PKG_CONFIG_NAME gee-0.8
LIB_NAMES gee-0.8
INCLUDE_NAMES gee.h
INCLUDE_DIR_SUFFIXES gee-0.8 gee-0.8/include
DEPENDS GObject
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gee
REQUIRED_VARS Gee_LIBRARY
VERSION_VAR Gee_VERSION)

cmake/FindGettext.cmake Normal file

@@ -0,0 +1,20 @@
find_program(XGETTEXT_EXECUTABLE xgettext)
find_program(MSGMERGE_EXECUTABLE msgmerge)
find_program(MSGFMT_EXECUTABLE msgfmt)
find_program(MSGCAT_EXECUTABLE msgcat)
mark_as_advanced(XGETTEXT_EXECUTABLE MSGMERGE_EXECUTABLE MSGFMT_EXECUTABLE MSGCAT_EXECUTABLE)
if(XGETTEXT_EXECUTABLE)
execute_process(COMMAND ${XGETTEXT_EXECUTABLE} "--version"
OUTPUT_VARIABLE Gettext_VERSION
OUTPUT_STRIP_TRAILING_WHITESPACE)
string(REGEX REPLACE "xgettext \\(GNU gettext-tools\\) ([0-9\\.]*).*" "\\1" Gettext_VERSION "${Gettext_VERSION}")
endif(XGETTEXT_EXECUTABLE)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gettext
FOUND_VAR Gettext_FOUND
REQUIRED_VARS XGETTEXT_EXECUTABLE MSGMERGE_EXECUTABLE MSGFMT_EXECUTABLE MSGCAT_EXECUTABLE
VERSION_VAR Gettext_VERSION)
set(GETTEXT_USE_FILE "${CMAKE_CURRENT_LIST_DIR}/UseGettext.cmake")

cmake/FindGnuTLS.cmake Normal file

@@ -0,0 +1,13 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GnuTLS
PKG_CONFIG_NAME gnutls
LIB_NAMES gnutls
INCLUDE_NAMES gnutls/gnutls.h
INCLUDE_DIR_SUFFIXES gnutls gnutls/include
DEPENDS GLib
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GnuTLS
REQUIRED_VARS GnuTLS_LIBRARY
VERSION_VAR GnuTLS_VERSION)

cmake/FindGspell.cmake Normal file

@@ -0,0 +1,14 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Gspell
PKG_CONFIG_NAME gspell-1
LIB_NAMES gspell-1
INCLUDE_NAMES gspell.h
INCLUDE_DIR_SUFFIXES gspell-1 gspell-1/gspell
DEPENDS GTK3
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gspell
REQUIRED_VARS Gspell_LIBRARY
VERSION_VAR Gspell_VERSION)

cmake/FindGst.cmake Normal file

@@ -0,0 +1,12 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Gst
PKG_CONFIG_NAME gstreamer-1.0
LIB_NAMES gstreamer-1.0
INCLUDE_NAMES gst/gst.h
INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gst
REQUIRED_VARS Gst_LIBRARY
VERSION_VAR Gst_VERSION)

cmake/FindGstApp.cmake Normal file

@@ -0,0 +1,14 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstApp
PKG_CONFIG_NAME gstreamer-app-1.0
LIB_NAMES gstapp
LIB_DIR_HINTS gstreamer-1.0
INCLUDE_NAMES gst/app/app.h
INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-app-1.0 gstreamer-app-1.0/include
DEPENDS Gst
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstApp
REQUIRED_VARS GstApp_LIBRARY
VERSION_VAR GstApp_VERSION)

cmake/FindGstAudio.cmake Normal file

@@ -0,0 +1,14 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstAudio
PKG_CONFIG_NAME gstreamer-audio-1.0
LIB_NAMES gstaudio
LIB_DIR_HINTS gstreamer-1.0
INCLUDE_NAMES gst/audio/audio.h
INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-audio-1.0 gstreamer-audio-1.0/include
DEPENDS Gst
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstAudio
REQUIRED_VARS GstAudio_LIBRARY
VERSION_VAR GstAudio_VERSION)

cmake/FindGstRtp.cmake Normal file

@@ -0,0 +1,19 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstRtp
PKG_CONFIG_NAME gstreamer-rtp-1.0
LIB_NAMES gstrtp
LIB_DIR_HINTS gstreamer-1.0
INCLUDE_NAMES gst/rtp/rtp.h
INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-rtp-1.0 gstreamer-rtp-1.0/include
DEPENDS Gst
)
if(GstRtp_FOUND AND NOT GstRtp_VERSION)
find_package(Gst)
set(GstRtp_VERSION ${Gst_VERSION})
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstRtp
REQUIRED_VARS GstRtp_LIBRARY
VERSION_VAR GstRtp_VERSION)

cmake/FindGstVideo.cmake Normal file

@@ -0,0 +1,14 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstVideo
PKG_CONFIG_NAME gstreamer-video-1.0
LIB_NAMES gstvideo
LIB_DIR_HINTS gstreamer-1.0
INCLUDE_NAMES gst/video/video.h
INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-video-1.0 gstreamer-video-1.0/include
DEPENDS Gst
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstVideo
REQUIRED_VARS GstVideo_LIBRARY
VERSION_VAR GstVideo_VERSION)

cmake/FindICU.cmake Normal file

@@ -0,0 +1,11 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(ICU
PKG_CONFIG_NAME icu-uc
LIB_NAMES icuuc icudata
INCLUDE_NAMES unicode/umachine.h
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(ICU
REQUIRED_VARS ICU_LIBRARY
VERSION_VAR ICU_VERSION)

cmake/FindNice.cmake Normal file

@@ -0,0 +1,13 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Nice
PKG_CONFIG_NAME nice
LIB_NAMES nice
INCLUDE_NAMES nice.h
INCLUDE_DIR_SUFFIXES nice nice/include
DEPENDS GIO
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Nice
REQUIRED_VARS Nice_LIBRARY
VERSION_VAR Nice_VERSION)

cmake/FindPango.cmake Normal file

@@ -0,0 +1,33 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Pango
PKG_CONFIG_NAME pango
LIB_NAMES pango-1.0
INCLUDE_NAMES pango/pango.h
INCLUDE_DIR_SUFFIXES pango-1.0 pango-1.0/include
DEPENDS GObject
)
if(Pango_FOUND AND NOT Pango_VERSION)
find_file(Pango_FEATURES_HEADER "pango/pango-features.h" HINTS ${Pango_INCLUDE_DIRS})
mark_as_advanced(Pango_FEATURES_HEADER)
if(Pango_FEATURES_HEADER)
file(STRINGS "${Pango_FEATURES_HEADER}" Pango_MAJOR_VERSION REGEX "^#define PANGO_VERSION_MAJOR +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define PANGO_VERSION_MAJOR \\(?([0-9]+)\\)?$" "\\1" Pango_MAJOR_VERSION "${Pango_MAJOR_VERSION}")
file(STRINGS "${Pango_FEATURES_HEADER}" Pango_MINOR_VERSION REGEX "^#define PANGO_VERSION_MINOR +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define PANGO_VERSION_MINOR \\(?([0-9]+)\\)?$" "\\1" Pango_MINOR_VERSION "${Pango_MINOR_VERSION}")
file(STRINGS "${Pango_FEATURES_HEADER}" Pango_MICRO_VERSION REGEX "^#define PANGO_VERSION_MICRO +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define PANGO_VERSION_MICRO \\(?([0-9]+)\\)?$" "\\1" Pango_MICRO_VERSION "${Pango_MICRO_VERSION}")
set(Pango_VERSION "${Pango_MAJOR_VERSION}.${Pango_MINOR_VERSION}.${Pango_MICRO_VERSION}")
unset(Pango_MAJOR_VERSION)
unset(Pango_MINOR_VERSION)
unset(Pango_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Pango
FOUND_VAR Pango_FOUND
REQUIRED_VARS Pango_LIBRARY
VERSION_VAR Pango_VERSION
)

cmake/FindQrencode.cmake Normal file

@@ -0,0 +1,11 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Qrencode
PKG_CONFIG_NAME libqrencode
LIB_NAMES qrencode
INCLUDE_NAMES qrencode.h
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Qrencode
REQUIRED_VARS Qrencode_LIBRARY
VERSION_VAR Qrencode_VERSION)

cmake/FindSQLite3.cmake Normal file

@@ -0,0 +1,21 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(SQLite3
PKG_CONFIG_NAME sqlite3
LIB_NAMES sqlite3
INCLUDE_NAMES sqlite3.h
)
if(SQLite3_FOUND AND NOT SQLite3_VERSION)
find_file(SQLite3_HEADER "sqlite3.h" HINTS ${SQLite3_INCLUDE_DIRS})
mark_as_advanced(SQLite3_HEADER)
if(SQLite3_HEADER)
file(STRINGS "${SQLite3_HEADER}" SQLite3_VERSION REGEX "^#define SQLITE_VERSION +\\\"[^\\\"]+\\\"")
string(REGEX REPLACE "^#define SQLITE_VERSION +\\\"([0-9]+)\\.([0-9]+)\\.([0-9]+)\\\"$" "\\1.\\2.\\3" SQLite3_VERSION "${SQLite3_VERSION}")
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(SQLite3
REQUIRED_VARS SQLite3_LIBRARY
VERSION_VAR SQLite3_VERSION)

cmake/FindSignalProtocol.cmake Normal file

@@ -0,0 +1,11 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(SignalProtocol
PKG_CONFIG_NAME libsignal-protocol-c
LIB_NAMES signal-protocol-c
INCLUDE_NAMES signal/signal_protocol.h
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(SignalProtocol
REQUIRED_VARS SignalProtocol_LIBRARY
VERSION_VAR SignalProtocol_VERSION)

cmake/FindSoup2.cmake Normal file

@@ -0,0 +1,31 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Soup2
PKG_CONFIG_NAME libsoup-2.4
LIB_NAMES soup-2.4
INCLUDE_NAMES libsoup/soup.h
INCLUDE_DIR_SUFFIXES libsoup-2.4 libsoup-2.4/include libsoup libsoup/include
DEPENDS GIO
)
if(Soup2_FOUND AND NOT Soup2_VERSION)
find_file(Soup2_VERSION_HEADER "libsoup/soup-version.h" HINTS ${Soup2_INCLUDE_DIRS})
mark_as_advanced(Soup2_VERSION_HEADER)
if(Soup2_VERSION_HEADER)
file(STRINGS "${Soup2_VERSION_HEADER}" Soup2_MAJOR_VERSION REGEX "^#define SOUP_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define SOUP_MAJOR_VERSION \\(?([0-9]+)\\)?$" "\\1" Soup2_MAJOR_VERSION "${Soup2_MAJOR_VERSION}")
file(STRINGS "${Soup2_VERSION_HEADER}" Soup2_MINOR_VERSION REGEX "^#define SOUP_MINOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define SOUP_MINOR_VERSION \\(?([0-9]+)\\)?$" "\\1" Soup2_MINOR_VERSION "${Soup2_MINOR_VERSION}")
file(STRINGS "${Soup2_VERSION_HEADER}" Soup2_MICRO_VERSION REGEX "^#define SOUP_MICRO_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define SOUP_MICRO_VERSION \\(?([0-9]+)\\)?$" "\\1" Soup2_MICRO_VERSION "${Soup2_MICRO_VERSION}")
set(Soup2_VERSION "${Soup2_MAJOR_VERSION}.${Soup2_MINOR_VERSION}.${Soup2_MICRO_VERSION}")
unset(Soup2_MAJOR_VERSION)
unset(Soup2_MINOR_VERSION)
unset(Soup2_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Soup2
REQUIRED_VARS Soup2_LIBRARY
VERSION_VAR Soup2_VERSION)

cmake/FindSoup3.cmake Normal file

@@ -0,0 +1,31 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Soup3
PKG_CONFIG_NAME libsoup-3.0
LIB_NAMES soup-3.0
INCLUDE_NAMES libsoup/soup.h
INCLUDE_DIR_SUFFIXES libsoup-2.4 libsoup-2.4/include libsoup libsoup/include
DEPENDS GIO
)
if(Soup3_FOUND AND NOT Soup3_VERSION)
find_file(Soup3_VERSION_HEADER "libsoup/soup-version.h" HINTS ${Soup3_INCLUDE_DIRS})
mark_as_advanced(Soup3_VERSION_HEADER)
if(Soup3_VERSION_HEADER)
file(STRINGS "${Soup3_VERSION_HEADER}" Soup3_MAJOR_VERSION REGEX "^#define SOUP_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define SOUP_MAJOR_VERSION \\(?([0-9]+)\\)?$" "\\1" Soup3_MAJOR_VERSION "${Soup3_MAJOR_VERSION}")
file(STRINGS "${Soup3_VERSION_HEADER}" Soup3_MINOR_VERSION REGEX "^#define SOUP_MINOR_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define SOUP_MINOR_VERSION \\(?([0-9]+)\\)?$" "\\1" Soup3_MINOR_VERSION "${Soup3_MINOR_VERSION}")
file(STRINGS "${Soup3_VERSION_HEADER}" Soup3_MICRO_VERSION REGEX "^#define SOUP_MICRO_VERSION +\\(?([0-9]+)\\)?$")
string(REGEX REPLACE "^#define SOUP_MICRO_VERSION \\(?([0-9]+)\\)?$" "\\1" Soup3_MICRO_VERSION "${Soup3_MICRO_VERSION}")
set(Soup3_VERSION "${Soup3_MAJOR_VERSION}.${Soup3_MINOR_VERSION}.${Soup3_MICRO_VERSION}")
unset(Soup3_MAJOR_VERSION)
unset(Soup3_MINOR_VERSION)
unset(Soup3_MICRO_VERSION)
endif()
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Soup3
REQUIRED_VARS Soup3_LIBRARY
VERSION_VAR Soup3_VERSION)

cmake/FindSrtp2.cmake Normal file

@@ -0,0 +1,12 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Srtp2
PKG_CONFIG_NAME libsrtp2
LIB_NAMES srtp2
INCLUDE_NAMES srtp2/srtp.h
INCLUDE_DIR_SUFFIXES srtp2 srtp2/include
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Srtp2
REQUIRED_VARS Srtp2_LIBRARY
VERSION_VAR Srtp2_VERSION)

cmake/FindVala.cmake Normal file

@@ -0,0 +1,73 @@
##
# Find module for the Vala compiler (valac)
#
# This module determines whether a Vala compiler is installed on the current
# system and where its executable is.
#
# Call the module using "find_package(Vala) from within your CMakeLists.txt.
#
# The following variables will be set after an invocation:
#
# VALA_FOUND Whether the vala compiler has been found or not
# VALA_EXECUTABLE Full path to the valac executable if it has been found
# VALA_VERSION Version number of the available valac
# VALA_USE_FILE Include this file to define the vala_precompile function
##
##
# Copyright 2009-2010 Jakob Westhoff. All rights reserved.
# Copyright 2010-2011 Daniel Pfeifer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY JAKOB WESTHOFF ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL JAKOB WESTHOFF OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of Jakob Westhoff
##
# Search for the valac executable in the usual system paths
# Some distributions rename valac to include the major.minor version in the binary name
find_package(GObject REQUIRED)
find_program(VALA_EXECUTABLE NAMES valac valac-0.38 valac-0.36 valac-0.34 valac-0.32)
mark_as_advanced(VALA_EXECUTABLE)
# Determine the valac version
if(VALA_EXECUTABLE)
file(TO_NATIVE_PATH "${VALA_EXECUTABLE}" VALA_EXECUTABLE)
execute_process(COMMAND ${VALA_EXECUTABLE} "--version"
OUTPUT_VARIABLE VALA_VERSION
OUTPUT_STRIP_TRAILING_WHITESPACE)
string(REPLACE "Vala " "" VALA_VERSION "${VALA_VERSION}")
endif(VALA_EXECUTABLE)
# Handle the QUIETLY and REQUIRED arguments, which may be given to the find call.
# Furthermore set VALA_FOUND to TRUE if Vala has been found (aka.
# VALA_EXECUTABLE is set)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Vala
FOUND_VAR VALA_FOUND
REQUIRED_VARS VALA_EXECUTABLE
VERSION_VAR VALA_VERSION)
set(VALA_USE_FILE "${CMAKE_CURRENT_LIST_DIR}/UseVala.cmake")

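A minimal sketch of how this module is consumed, using only the variables documented in its header (vala_precompile() itself is defined in UseVala.cmake, which is not part of this diff):

find_package(Vala 0.34 REQUIRED)
include(${VALA_USE_FILE})          # makes vala_precompile() available
message(STATUS "valac ${VALA_VERSION} at ${VALA_EXECUTABLE}")
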
cmake/FindWebRTCAudioProcessing.cmake Normal file

@@ -0,0 +1,12 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(WebRTCAudioProcessing
PKG_CONFIG_NAME webrtc-audio-processing
LIB_NAMES webrtc_audio_processing
INCLUDE_NAMES webrtc/modules/audio_processing/include/audio_processing.h
INCLUDE_DIR_SUFFIXES webrtc-audio-processing webrtc_audio_processing
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(WebRTCAudioProcessing
REQUIRED_VARS WebRTCAudioProcessing_LIBRARY
VERSION_VAR WebRTCAudioProcessing_VERSION)

124
cmake/GenerateGXML.cmake Normal file
View File

@ -0,0 +1,124 @@
include(CMakeParseArguments)
# Generates the resource XML controlling file from a resource list (and saves it
# to xml_path). It's not recommended to use this function directly, since it
# doesn't handle invalid arguments. It is used by the function
# COMPILE_GRESOURCES() to create a custom command, so that this function is
# invoked at build-time in script mode from CMake.
function(GENERATE_GXML xml_path)
# Available options:
# COMPRESS_ALL, NO_COMPRESS_ALL Overrides the COMPRESS flag in all
# registered resources.
# STRIPBLANKS_ALL, NO_STRIPBLANKS_ALL Overrides the STRIPBLANKS flag in all
# registered resources.
# TOPIXDATA_ALL, NO_TOPIXDATA_ALL Overrides the TOPIXDATA flag in all
# registered resources.
set(GXML_OPTIONS COMPRESS_ALL NO_COMPRESS_ALL
STRIPBLANKS_ALL NO_STRIPBLANKS_ALL
TOPIXDATA_ALL NO_TOPIXDATA_ALL)
# Available one value options:
# PREFIX Overrides the resource prefix that is prepended to each
# relative file name in registered resources.
set(GXML_ONEVALUEARGS PREFIX)
# Available multi-value options:
# RESOURCES The list of resource files. Absolute and relative paths are
# treated equally; absolute paths are stripped down to relative ones. If an
# absolute path is not inside the given base directory SOURCE_DIR
# or CMAKE_SOURCE_DIR (if SOURCE_DIR is not overridden), this
# function aborts.
set(GXML_MULTIVALUEARGS RESOURCES)
# Parse the arguments.
cmake_parse_arguments(GXML_ARG
"${GXML_OPTIONS}"
"${GXML_ONEVALUEARGS}"
"${GXML_MULTIVALUEARGS}"
"${ARGN}")
# Variable to store the double-quote (") string. Since escaping
# double-quotes in strings is not possible we need a helper variable that
# does this job for us.
set(Q \")
# Process resources and generate XML file.
# Begin with the XML header and header nodes.
set(GXML_XML_FILE "<?xml version=${Q}1.0${Q} encoding=${Q}UTF-8${Q}?>")
set(GXML_XML_FILE "${GXML_XML_FILE}<gresources><gresource prefix=${Q}")
# Set the prefix for the resources. Depending on the user-override we choose
# the standard prefix "/" or the override.
if (GXML_ARG_PREFIX)
set(GXML_XML_FILE "${GXML_XML_FILE}${GXML_ARG_PREFIX}")
else()
set(GXML_XML_FILE "${GXML_XML_FILE}/")
endif()
set(GXML_XML_FILE "${GXML_XML_FILE}${Q}>")
# Process each resource.
foreach(res ${GXML_ARG_RESOURCES})
if ("${res}" STREQUAL "COMPRESS")
set(GXML_COMPRESSION_FLAG ON)
elseif ("${res}" STREQUAL "STRIPBLANKS")
set(GXML_STRIPBLANKS_FLAG ON)
elseif ("${res}" STREQUAL "TOPIXDATA")
set(GXML_TOPIXDATA_FLAG ON)
else()
# The file name.
set(GXML_RESOURCE_PATH "${res}")
# Append to real resource file dependency list.
list(APPEND GXML_RESOURCES_DEPENDENCIES ${GXML_RESOURCE_PATH})
# Assemble <file> node.
set(GXML_RES_LINE "<file")
if ((GXML_ARG_COMPRESS_ALL OR GXML_COMPRESSION_FLAG) AND NOT
GXML_ARG_NO_COMPRESS_ALL)
set(GXML_RES_LINE "${GXML_RES_LINE} compressed=${Q}true${Q}")
endif()
# Check preprocess flag validity.
if ((GXML_ARG_STRIPBLANKS_ALL OR GXML_STRIPBLANKS_FLAG) AND
(GXML_ARG_TOPIXDATA_ALL OR GXML_TOPIXDATA_FLAG))
set(GXML_ERRMSG "Resource preprocessing option conflict. Tried")
set(GXML_ERRMSG "${GXML_ERRMSG} to specify both, STRIPBLANKS")
set(GXML_ERRMSG "${GXML_ERRMSG} and TOPIXDATA. In resource")
set(GXML_ERRMSG "${GXML_ERRMSG} ${GXML_RESOURCE_PATH} in")
set(GXML_ERRMSG "${GXML_ERRMSG} function COMPILE_GRESOURCES.")
message(FATAL_ERROR ${GXML_ERRMSG})
endif()
if ((GXML_ARG_STRIPBLANKS_ALL OR GXML_STRIPBLANKS_FLAG) AND NOT
GXML_ARG_NO_STRIPBLANKS_ALL)
set(GXML_RES_LINE "${GXML_RES_LINE} preprocess=")
set(GXML_RES_LINE "${GXML_RES_LINE}${Q}xml-stripblanks${Q}")
elseif((GXML_ARG_TOPIXDATA_ALL OR GXML_TOPIXDATA_FLAG) AND NOT
GXML_ARG_NO_TOPIXDATA_ALL)
set(GXML_RES_LINE "${GXML_RES_LINE} preprocess=")
set(GXML_RES_LINE "${GXML_RES_LINE}${Q}to-pixdata${Q}")
endif()
set(GXML_RES_LINE "${GXML_RES_LINE}>${GXML_RESOURCE_PATH}</file>")
# Append to file string.
set(GXML_XML_FILE "${GXML_XML_FILE}${GXML_RES_LINE}")
# Unset variables.
unset(GXML_COMPRESSION_FLAG)
unset(GXML_STRIPBLANKS_FLAG)
unset(GXML_TOPIXDATA_FLAG)
endif()
endforeach()
# Append closing nodes.
set(GXML_XML_FILE "${GXML_XML_FILE}</gresource></gresources>")
# Use "file" function to generate XML controlling file.
get_filename_component(xml_path_only_name "${xml_path}" NAME)
file(WRITE ${xml_path} ${GXML_XML_FILE})
endfunction()
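
For illustration only (the module itself recommends going through COMPILE_GRESOURCES() instead), a direct call might look roughly like this; the resource names and prefix are hypothetical:

    generate_gxml("${CMAKE_CURRENT_BINARY_DIR}/resources.gresource.xml"
        PREFIX /im/dino/Dino
        RESOURCES
            icons/dino-symbolic.svg
            STRIPBLANKS ui/main.ui)
    # STRIPBLANKS applies to the file that follows it; COMPRESS and TOPIXDATA work the same way.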

View File

@ -0,0 +1,11 @@
# Path to this file.
set(GCR_CMAKE_MACRO_DIR ${CMAKE_CURRENT_LIST_DIR})
# Finds the glib-compile-resources executable.
find_program(GLIB_COMPILE_RESOURCES_EXECUTABLE glib-compile-resources)
mark_as_advanced(GLIB_COMPILE_RESOURCES_EXECUTABLE)
# Include the cmake files containing the functions.
include(${GCR_CMAKE_MACRO_DIR}/CompileGResources.cmake)
include(${GCR_CMAKE_MACRO_DIR}/GenerateGXML.cmake)

11
cmake/LargeFileOffsets.c Normal file
View File

@ -0,0 +1,11 @@
#include <sys/types.h>
#define _K ((off_t)1024)
#define _M ((off_t)1024 * _K)
#define _G ((off_t)1024 * _M)
#define _T ((off_t)1024 * _G)
int test[(((64 * _G -1) % 671088649) == 268434537) && (((_T - (64 * _G -1) + 255) % 1792151290) == 305159546)? 1: -1];
int main() {
return 0;
}
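
This source is a compile-time probe: the array declaration is only legal when off_t can represent 64-bit offsets. A plausible way to consume it from CMake (the result variable and flag are assumptions, not taken from this repository):

    # Succeeds only if off_t is 64 bits wide under the given definitions.
    try_compile(HAVE_64BIT_OFF_T
        ${CMAKE_BINARY_DIR}
        ${CMAKE_SOURCE_DIR}/cmake/LargeFileOffsets.c
        COMPILE_DEFINITIONS -D_FILE_OFFSET_BITS=64)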

47
cmake/MultiFind.cmake Normal file
View File

@ -0,0 +1,47 @@
include(CMakeParseArguments)
function(find_packages result)
cmake_parse_arguments(ARGS "" "" "REQUIRED;OPTIONAL" ${ARGN})
set(_res "")
set(_res_libs "")
foreach(pkg ${ARGS_REQUIRED})
string(REPLACE ">=" ";" pkg_ ${pkg})
list(GET pkg_ "0" pkg)
list(LENGTH pkg_ pkg_has_version)
if(pkg_has_version GREATER 1)
list(GET pkg_ "1" pkg_version)
else()
if(${pkg}_GLOBAL_VERSION)
set(pkg_version ${${pkg}_GLOBAL_VERSION})
else()
unset(pkg_version)
endif()
endif()
find_package(${pkg} ${pkg_version} REQUIRED)
list(APPEND _res ${${pkg}_PKG_CONFIG_NAME})
list(APPEND _res_libs ${${pkg}_LIBRARIES})
set(${pkg}_VERSION "${${pkg}_VERSION}" PARENT_SCOPE)
endforeach(pkg)
foreach(pkg ${ARGS_OPTIONAL})
string(REPLACE ">=" ";" pkg_ ${pkg})
list(GET pkg_ "0" pkg)
list(LENGTH pkg_ pkg_has_version)
if(pkg_has_version GREATER 1)
list(GET pkg_ "1" pkg_version)
else()
if(${pkg}_GLOBAL_VERSION)
set(pkg_version ${${pkg}_GLOBAL_VERSION})
else()
unset(pkg_version)
endif()
endif()
find_package(${pkg} ${pkg_version})
if(${pkg}_FOUND)
list(APPEND _res ${${pkg}_PKG_CONFIG_NAME})
list(APPEND _res_libs ${${pkg}_LIBRARIES})
set(${pkg}_VERSION "${${pkg}_VERSION}" PARENT_SCOPE)
endif()
endforeach(pkg)
set(${result} "${_res}" PARENT_SCOPE)
set(${result}_LIBS "${_res_libs}" PARENT_SCOPE)
endfunction()
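
A hedged example of calling this helper; the package names, version bound, and target name are illustrative:

    find_packages(MAIN_PACKAGES
        REQUIRED
            GLib>=2.38
            GObject
            GIO
        OPTIONAL
            GDKPixbuf2)
    # ${MAIN_PACKAGES} holds the pkg-config/imported target names, ${MAIN_PACKAGES_LIBS} the resolved library files.
    target_link_libraries(mytarget ${MAIN_PACKAGES})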

View File

@ -0,0 +1,102 @@
include(CMakeParseArguments)
function(find_pkg_config_with_fallback name)
cmake_parse_arguments(ARGS "" "PKG_CONFIG_NAME" "LIB_NAMES;LIB_DIR_HINTS;INCLUDE_NAMES;INCLUDE_DIR_PATHS;INCLUDE_DIR_HINTS;INCLUDE_DIR_SUFFIXES;DEPENDS" ${ARGN})
set(${name}_PKG_CONFIG_NAME ${ARGS_PKG_CONFIG_NAME} PARENT_SCOPE)
find_package(PkgConfig)
if(PKG_CONFIG_FOUND)
pkg_search_module(${name}_PKG_CONFIG QUIET ${ARGS_PKG_CONFIG_NAME})
endif(PKG_CONFIG_FOUND)
if (${name}_PKG_CONFIG_FOUND)
# Found via pkg-config, using its result values
set(${name}_FOUND ${${name}_PKG_CONFIG_FOUND})
# Try to find real file name of libraries
foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
find_library(${name}_${lib}_LIBRARY ${lib} HINTS ${${name}_PKG_CONFIG_LIBRARY_DIRS})
mark_as_advanced(${name}_${lib}_LIBRARY)
if(NOT ${name}_${lib}_LIBRARY)
unset(${name}_FOUND)
endif(NOT ${name}_${lib}_LIBRARY)
endforeach(lib)
if(${name}_FOUND)
set(${name}_LIBRARIES "")
foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
list(APPEND ${name}_LIBRARIES ${${name}_${lib}_LIBRARY})
endforeach(lib)
list(REMOVE_DUPLICATES ${name}_LIBRARIES)
set(${name}_LIBRARIES ${${name}_LIBRARIES} PARENT_SCOPE)
list(GET ${name}_LIBRARIES "0" ${name}_LIBRARY)
set(${name}_FOUND ${${name}_FOUND} PARENT_SCOPE)
set(${name}_INCLUDE_DIRS ${${name}_PKG_CONFIG_INCLUDE_DIRS} PARENT_SCOPE)
set(${name}_LIBRARIES ${${name}_PKG_CONFIG_LIBRARIES} PARENT_SCOPE)
set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
set(${name}_VERSION ${${name}_PKG_CONFIG_VERSION} PARENT_SCOPE)
if(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
add_library(${ARGS_PKG_CONFIG_NAME} INTERFACE IMPORTED)
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_COMPILE_OPTIONS "${${name}_PKG_CONFIG_CFLAGS_OTHER}")
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${name}_PKG_CONFIG_INCLUDE_DIRS}")
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LIBRARIES}")
endif(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
endif(${name}_FOUND)
else(${name}_PKG_CONFIG_FOUND)
# No success with pkg-config, try via find_library on all lib_names
set(${name}_FOUND "1")
foreach(lib ${ARGS_LIB_NAMES})
find_library(${name}_${lib}_LIBRARY ${ARGS_LIB_NAMES} HINTS ${ARGS_LIB_DIR_HINTS})
mark_as_advanced(${name}_${lib}_LIBRARY)
if(NOT ${name}_${lib}_LIBRARY)
unset(${name}_FOUND)
endif(NOT ${name}_${lib}_LIBRARY)
endforeach(lib)
foreach(inc ${ARGS_INCLUDE_NAMES})
find_path(${name}_${inc}_INCLUDE_PATH ${inc} HINTS ${ARGS_INCLUDE_DIR_HINTS} PATHS ${ARGS_INCLUDE_DIR_PATHS} PATH_SUFFIXES ${ARGS_INCLUDE_DIR_SUFFIXES})
mark_as_advanced(${name}_${inc}_INCLUDE_PATH)
if(NOT ${name}_${inc}_INCLUDE_PATH)
unset(${name}_FOUND)
endif(NOT ${name}_${inc}_INCLUDE_PATH)
endforeach(inc)
if(${name}_FOUND)
set(${name}_LIBRARIES "")
set(${name}_INCLUDE_DIRS "")
foreach(lib ${ARGS_LIB_NAMES})
list(APPEND ${name}_LIBRARIES ${${name}_${lib}_LIBRARY})
endforeach(lib)
foreach(inc ${ARGS_INCLUDE_NAMES})
list(APPEND ${name}_INCLUDE_DIRS ${${name}_${inc}_INCLUDE_PATH})
endforeach(inc)
list(GET ${name}_LIBRARIES "0" ${name}_LIBRARY)
foreach(dep ${ARGS_DEPENDS})
find_package(${dep} ${${dep}_GLOBAL_VERSION} QUIET)
if(${dep}_FOUND)
list(APPEND ${name}_INCLUDE_DIRS ${${dep}_INCLUDE_DIRS})
list(APPEND ${name}_LIBRARIES ${${dep}_LIBRARIES})
else(${dep}_FOUND)
unset(${name}_FOUND)
endif(${dep}_FOUND)
endforeach(dep)
set(${name}_FOUND ${${name}_FOUND} PARENT_SCOPE)
set(${name}_INCLUDE_DIRS ${${name}_INCLUDE_DIRS} PARENT_SCOPE)
set(${name}_LIBRARIES ${${name}_LIBRARIES} PARENT_SCOPE)
set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
unset(${name}_VERSION PARENT_SCOPE)
if(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
add_library(${ARGS_PKG_CONFIG_NAME} INTERFACE IMPORTED)
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${name}_INCLUDE_DIRS}")
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LIBRARIES}")
endif(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
endif(${name}_FOUND)
endif(${name}_PKG_CONFIG_FOUND)
endfunction()
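
A find module built on this helper typically looks like the Srtp2 module shown earlier; a sketch for a hypothetical library Foo (all names assumed) that also declares a dependency:

    include(PkgConfigWithFallback)
    find_pkg_config_with_fallback(Foo
        PKG_CONFIG_NAME foo-1.0
        LIB_NAMES foo
        INCLUDE_NAMES foo/foo.h
        INCLUDE_DIR_SUFFIXES foo-1.0
        DEPENDS GLib)
    include(FindPackageHandleStandardArgs)
    find_package_handle_standard_args(Foo
        REQUIRED_VARS Foo_LIBRARY
        VERSION_VAR Foo_VERSION)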

View File

@ -0,0 +1,103 @@
include(CMakeParseArguments)
function(find_pkg_config_with_fallback_on_config_script name)
cmake_parse_arguments(ARGS "" "PKG_CONFIG_NAME" "CONFIG_SCRIPT_NAME" ${ARGN})
set(${name}_PKG_CONFIG_NAME ${ARGS_PKG_CONFIG_NAME} PARENT_SCOPE)
find_package(PkgConfig)
if(PKG_CONFIG_FOUND)
pkg_search_module(${name}_PKG_CONFIG QUIET ${ARGS_PKG_CONFIG_NAME})
endif(PKG_CONFIG_FOUND)
if (${name}_PKG_CONFIG_FOUND)
# Found via pkg-config, using its result values
set(${name}_FOUND ${${name}_PKG_CONFIG_FOUND})
# Try to find real file name of libraries
foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
find_library(${name}_${lib}_LIBRARY ${lib} HINTS ${${name}_PKG_CONFIG_LIBRARY_DIRS})
mark_as_advanced(${name}_${lib}_LIBRARY)
if(NOT ${name}_${lib}_LIBRARY)
unset(${name}_FOUND)
endif(NOT ${name}_${lib}_LIBRARY)
endforeach(lib)
if(${name}_FOUND)
set(${name}_LIBRARIES "")
foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
list(APPEND ${name}_LIBRARIES ${${name}_${lib}_LIBRARY})
endforeach(lib)
list(REMOVE_DUPLICATES ${name}_LIBRARIES)
set(${name}_LIBRARIES ${${name}_LIBRARIES} PARENT_SCOPE)
list(GET ${name}_LIBRARIES "0" ${name}_LIBRARY)
set(${name}_FOUND ${${name}_FOUND} PARENT_SCOPE)
set(${name}_INCLUDE_DIRS ${${name}_PKG_CONFIG_INCLUDE_DIRS} PARENT_SCOPE)
set(${name}_LIBRARIES ${${name}_PKG_CONFIG_LIBRARIES} PARENT_SCOPE)
set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
set(${name}_VERSION ${${name}_PKG_CONFIG_VERSION} PARENT_SCOPE)
if(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
add_library(${ARGS_PKG_CONFIG_NAME} INTERFACE IMPORTED)
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_COMPILE_OPTIONS "${${name}_PKG_CONFIG_CFLAGS_OTHER}")
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${name}_PKG_CONFIG_INCLUDE_DIRS}")
set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LIBRARIES}")
endif(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
endif(${name}_FOUND)
else(${name}_PKG_CONFIG_FOUND)
# No success with pkg-config, try via a custom *-config script
find_program(${name}_CONFIG_EXECUTABLE NAMES ${ARGS_CONFIG_SCRIPT_NAME}-config)
mark_as_advanced(${name}_CONFIG_EXECUTABLE)
find_program(${name}_SH_EXECUTABLE NAMES sh)
mark_as_advanced(${name}_SH_EXECUTABLE)
if(${name}_CONFIG_EXECUTABLE)
macro(config_script_fail errcode)
if(${errcode})
message(FATAL_ERROR "Error invoking ${ARGS_CONFIG_SCRIPT_NAME}-config: ${errcode}")
endif(${errcode})
endmacro(config_script_fail)
file(TO_NATIVE_PATH "${${name}_CONFIG_EXECUTABLE}" ${name}_CONFIG_EXECUTABLE)
file(TO_NATIVE_PATH "${${name}_SH_EXECUTABLE}" ${name}_SH_EXECUTABLE)
execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --version
OUTPUT_VARIABLE ${name}_VERSION
RESULT_VARIABLE ERRCODE
OUTPUT_STRIP_TRAILING_WHITESPACE)
config_script_fail(${ERRCODE})
execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --api-version
OUTPUT_VARIABLE ${name}_API_VERSION
RESULT_VARIABLE ERRCODE
OUTPUT_STRIP_TRAILING_WHITESPACE)
config_script_fail(${ERRCODE})
execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --cflags
OUTPUT_VARIABLE ${name}_CFLAGS
RESULT_VARIABLE ERRCODE
OUTPUT_STRIP_TRAILING_WHITESPACE)
config_script_fail(${ERRCODE})
execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --libs
OUTPUT_VARIABLE ${name}_LDFLAGS
RESULT_VARIABLE ERRCODE
OUTPUT_STRIP_TRAILING_WHITESPACE)
config_script_fail(${ERRCODE})
string(TOLOWER ${name} "${name}_LOWER")
string(REGEX REPLACE "^(.* |)-l([^ ]*${${name}_LOWER}[^ ]*)( .*|)$" "\\2" ${name}_LIBRARY_NAME "${${name}_LDFLAGS}")
string(REGEX REPLACE "^(.* |)-L([^ ]*)( .*|)$" "\\2" ${name}_LIBRARY_DIRS "${${name}_LDFLAGS}")
find_library(${name}_LIBRARY ${${name}_LIBRARY_NAME} HINTS ${${name}_LIBRARY_DIRS})
mark_as_advanced(${name}_LIBRARY)
set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
set(${name}_VERSION ${${name}_VERSION} PARENT_SCOPE)
unset(${name}_LIBRARY_NAME)
unset(${name}_LIBRARY_DIRS)
if(NOT TARGET ${name}_LOWER)
add_library(${name}_LOWER INTERFACE IMPORTED)
set_property(TARGET ${name}_LOWER PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LDFLAGS}")
set_property(TARGET ${name}_LOWER PROPERTY INTERFACE_COMPILE_OPTIONS "${${name}_CFLAGS}")
endif(NOT TARGET ${name}_LOWER)
endif(${name}_CONFIG_EXECUTABLE)
endif(${name}_PKG_CONFIG_FOUND)
endfunction()
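
A sketch of a find module using the *-config fallback, e.g. for a library shipping a gpgme-config style script; the concrete names are assumptions:

    include(PkgConfigWithFallbackOnConfigScript)  # assuming this is the module file name
    find_pkg_config_with_fallback_on_config_script(GPGME
        PKG_CONFIG_NAME gpgme
        CONFIG_SCRIPT_NAME gpgme)
    include(FindPackageHandleStandardArgs)
    find_package_handle_standard_args(GPGME
        REQUIRED_VARS GPGME_LIBRARY
        VERSION_VAR GPGME_VERSION)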

33
cmake/SoupVersion.cmake Normal file
View File

@ -0,0 +1,33 @@
find_package(Nice QUIET)
if (Nice_FOUND AND NOT SOUP_VERSION AND NOT USE_SOUP3)
file(GET_RUNTIME_DEPENDENCIES
RESOLVED_DEPENDENCIES_VAR Nice_DEPENDENCIES
UNRESOLVED_DEPENDENCIES_VAR Nice_UNRESOLVED_DEPENDENCIES
LIBRARIES ${Nice_LIBRARY}
PRE_INCLUDE_REGEXES "soup|gupnp"
PRE_EXCLUDE_REGEXES "."
)
foreach (lib ${Nice_DEPENDENCIES})
if (lib MATCHES ".*/libsoup-3.*")
set(SOUP_VERSION 3)
endif ()
endforeach ()
foreach (lib ${Nice_DEPENDENCIES})
if (lib MATCHES ".*/libsoup-2.*")
set(SOUP_VERSION 2)
endif ()
endforeach ()
set(SOUP_VERSION ${SOUP_VERSION} CACHE STRING "Version of libsoup to use")
set_property(CACHE SOUP_VERSION PROPERTY STRINGS "2" "3")
message(STATUS "Using Soup${SOUP_VERSION} to provide Soup")
elseif (NOT SOUP_VERSION)
find_package(Soup2 QUIET)
find_package(Soup3 QUIET)
# Only use libsoup 3 if specifically requested or when libsoup 2 is not available
if (Soup3_FOUND AND NOT Soup2_FOUND OR USE_SOUP3)
set(SOUP_VERSION 3)
else ()
set(SOUP_VERSION 2)
endif ()
endif ()
set(Soup "Soup${SOUP_VERSION}")
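
Downstream CMake code can then resolve the selected library generically; a minimal sketch, assuming Find modules named Soup2/Soup3 exist as referenced above:

    include(SoupVersion)
    find_package(${Soup} REQUIRED)
    # Variables and imported targets are then Soup2_* or Soup3_* depending on the selection.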

28
cmake/UseGettext.cmake Normal file
View File

@ -0,0 +1,28 @@
function(_gettext_mkdir_for_file file)
get_filename_component(dir "${file}" DIRECTORY)
file(MAKE_DIRECTORY "${dir}")
endfunction()
function(gettext_compile project_name)
cmake_parse_arguments(ARGS "" "MO_FILES_NAME;TARGET_NAME;SOURCE_DIR;PROJECT_NAME" "" ${ARGN})
if(NOT ARGS_SOURCE_DIR)
set(ARGS_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
endif(NOT ARGS_SOURCE_DIR)
file(STRINGS "${ARGS_SOURCE_DIR}/LINGUAS" LINGUAS)
set(target_files)
foreach(lang ${LINGUAS})
set(source_file ${ARGS_SOURCE_DIR}/${lang}.po)
set(target_file ${CMAKE_BINARY_DIR}/locale/${lang}/LC_MESSAGES/${project_name}.mo)
_gettext_mkdir_for_file(${target_file})
list(APPEND target_files ${target_file})
add_custom_command(OUTPUT ${target_file} COMMAND ${MSGFMT_EXECUTABLE} --check-format -o ${target_file} ${source_file} DEPENDS ${source_file})
install(FILES ${target_file} DESTINATION ${LOCALE_INSTALL_DIR}/${lang}/LC_MESSAGES)
endforeach(lang)
if(ARGS_MO_FILES_NAME)
set(${ARGS_MO_FILES_NAME} ${target_files} PARENT_SCOPE)
endif(ARGS_MO_FILES_NAME)
if(ARGS_TARGET_NAME)
add_custom_target(${ARGS_TARGET_NAME} DEPENDS ${target_files})
endif(ARGS_TARGET_NAME)
endfunction(gettext_compile)
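
A hedged usage sketch; the project name, po directory, and target names are assumptions, and MSGFMT_EXECUTABLE and LOCALE_INSTALL_DIR must already be set by the including project:

    include(UseGettext)  # assuming the module is on CMAKE_MODULE_PATH
    gettext_compile(dino
        SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/po
        TARGET_NAME dino-translations
        MO_FILES_NAME DINO_MO_FILES)
    add_dependencies(dino dino-translations)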

337
cmake/UseVala.cmake Normal file
View File

@ -0,0 +1,337 @@
##
# Compile vala files to their c equivalents for further processing.
#
# The "vala_precompile" function takes care of calling the valac executable on
# the given source to produce c files which can then be processed further using
# default cmake functions.
#
# The first parameter provided is a variable, which will be filled with a list
# of c files outputted by the vala compiler. This list can then be used in
# conjunction with functions like "add_executable" or others to create the
# necessary compile rules with CMake.
#
# The following sections may be specified afterwards to provide certain options
# to the vala compiler:
#
# SOURCES
# A list of .vala files to be compiled. Please take care to add every vala
# file belonging to the currently compiled project or library as Vala will
# otherwise not be able to resolve all dependencies.
#
# PACKAGES
# A list of vala packages/libraries to be used during the compile cycle. The
# package names are exactly the same as they would be passed to the valac
# "--pkg=" option.
#
# OPTIONS
# A list of additional options to be passed to the valac executable. This can be
# used to pass "--thread" for example to enable multi-threading support.
#
# DEFINITIONS
# A list of symbols to be used for conditional compilation. They are the same
# as they would be passed using the valac "--define=" option.
#
# CUSTOM_VAPIS
# A list of custom vapi files to be included for compilation. This can be
# useful to include freshly created vala libraries without having to install
# them in the system.
#
# GENERATE_VAPI
# Pass all the needed flags to the compiler to create a vapi for
# the compiled library. The provided name will be used for this and a
# <provided_name>.vapi file will be created.
#
# GENERATE_HEADER
# Let the compiler generate a header file for the compiled code. A public
# header file and an internal header file will be generated, called
# <provided_name>.h and <provided_name>_internal.h
#
# The following call is a simple example of the vala_precompile function,
# showing every optional section:
#
# find_package(Vala "0.12" REQUIRED)
# include(${VALA_USE_FILE})
#
# vala_precompile(VALA_C
# SOURCES
# source1.vala
# source2.vala
# source3.vala
# PACKAGES
# gtk+-2.0
# gio-1.0
# posix
# DIRECTORY
# gen
# OPTIONS
# --thread
# CUSTOM_VAPIS
# some_vapi.vapi
# GENERATE_VAPI
# myvapi
# GENERATE_HEADER
# myheader
# )
#
# Most important is the variable VALA_C which will contain all the generated c
# file names after the call.
##
##
# Copyright 2009-2010 Jakob Westhoff. All rights reserved.
# Copyright 2010-2011 Daniel Pfeifer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY JAKOB WESTHOFF ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL JAKOB WESTHOFF OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of Jakob Westhoff
##
include(CMakeParseArguments)
function(_vala_mkdir_for_file file)
get_filename_component(dir "${file}" DIRECTORY)
file(MAKE_DIRECTORY "${dir}")
endfunction()
function(vala_precompile output)
cmake_parse_arguments(ARGS "FAST_VAPI" "DIRECTORY;GENERATE_HEADER;GENERATE_VAPI;EXPORTS_DIR"
"SOURCES;PACKAGES;OPTIONS;DEFINITIONS;CUSTOM_VAPIS;CUSTOM_DEPS;GRESOURCES" ${ARGN})
# Header and internal header is needed to generate internal vapi
if (ARGS_GENERATE_VAPI AND NOT ARGS_GENERATE_HEADER)
set(ARGS_GENERATE_HEADER ${ARGS_GENERATE_VAPI})
endif(ARGS_GENERATE_VAPI AND NOT ARGS_GENERATE_HEADER)
if("Ninja" STREQUAL ${CMAKE_GENERATOR} AND NOT DISABLE_FAST_VAPI AND NOT ARGS_GENERATE_HEADER)
set(ARGS_FAST_VAPI true)
endif()
if(ARGS_DIRECTORY)
get_filename_component(DIRECTORY ${ARGS_DIRECTORY} ABSOLUTE)
else(ARGS_DIRECTORY)
set(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
endif(ARGS_DIRECTORY)
if(ARGS_EXPORTS_DIR)
set(ARGS_EXPORTS_DIR ${CMAKE_BINARY_DIR}/${ARGS_EXPORTS_DIR})
else(ARGS_EXPORTS_DIR)
set(ARGS_EXPORTS_DIR ${CMAKE_BINARY_DIR}/exports)
endif(ARGS_EXPORTS_DIR)
file(MAKE_DIRECTORY "${ARGS_EXPORTS_DIR}")
include_directories(${DIRECTORY} ${ARGS_EXPORTS_DIR})
set(vala_pkg_opts "")
foreach(pkg ${ARGS_PACKAGES})
list(APPEND vala_pkg_opts "--pkg=${pkg}")
endforeach(pkg ${ARGS_PACKAGES})
set(vala_define_opts "")
foreach(def ${ARGS_DEFINITIONS})
list(APPEND vala_define_opts "--define=${def}")
endforeach(def ${ARGS_DEFINITIONS})
set(custom_vapi_arguments "")
if(ARGS_CUSTOM_VAPIS)
foreach(vapi ${ARGS_CUSTOM_VAPIS})
if(${vapi} MATCHES ${CMAKE_SOURCE_DIR} OR ${vapi} MATCHES ${CMAKE_BINARY_DIR})
list(APPEND custom_vapi_arguments ${vapi})
else (${vapi} MATCHES ${CMAKE_SOURCE_DIR} OR ${vapi} MATCHES ${CMAKE_BINARY_DIR})
list(APPEND custom_vapi_arguments ${CMAKE_CURRENT_SOURCE_DIR}/${vapi})
endif(${vapi} MATCHES ${CMAKE_SOURCE_DIR} OR ${vapi} MATCHES ${CMAKE_BINARY_DIR})
endforeach(vapi ${ARGS_CUSTOM_VAPIS})
endif(ARGS_CUSTOM_VAPIS)
set(gresources_args "")
if(ARGS_GRESOURCES)
set(gresources_args --gresources "${ARGS_GRESOURCES}")
endif(ARGS_GRESOURCES)
set(in_files "")
set(fast_vapi_files "")
set(out_files "")
set(out_extra_files "")
set(out_deps_files "")
set(vapi_arguments "")
if(ARGS_GENERATE_VAPI)
list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.vapi")
list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}_internal.vapi")
set(vapi_arguments "--vapi=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.vapi" "--internal-vapi=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}_internal.vapi")
if(ARGS_PACKAGES)
string(REPLACE ";" "\\n" pkgs "${ARGS_PACKAGES};${ARGS_CUSTOM_DEPS}")
add_custom_command(OUTPUT "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.deps" COMMAND echo -e "\"${pkgs}\"" > "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.deps" COMMENT "Generating ${ARGS_GENERATE_VAPI}.deps")
endif(ARGS_PACKAGES)
endif(ARGS_GENERATE_VAPI)
set(header_arguments "")
if(ARGS_GENERATE_HEADER)
list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}.h")
list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}_internal.h")
list(APPEND header_arguments "--header=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}.h")
list(APPEND header_arguments "--internal-header=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}_internal.h")
endif(ARGS_GENERATE_HEADER)
string(REPLACE " " ";" VALAC_FLAGS ${CMAKE_VALA_FLAGS})
if (VALA_VERSION VERSION_GREATER "0.38")
set(VALAC_COLORS "--color=always")
endif ()
if(ARGS_FAST_VAPI)
foreach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
set(in_file "${CMAKE_CURRENT_SOURCE_DIR}/${src}")
list(APPEND in_files "${in_file}")
string(REPLACE ".vala" ".c" src ${src})
string(REPLACE ".gs" ".c" src ${src})
string(REPLACE ".c" ".vapi" fast_vapi ${src})
set(fast_vapi_file "${DIRECTORY}/${fast_vapi}")
list(APPEND fast_vapi_files "${fast_vapi_file}")
list(APPEND out_files "${DIRECTORY}/${src}")
_vala_mkdir_for_file("${fast_vapi_file}")
add_custom_command(OUTPUT ${fast_vapi_file}
COMMAND
${VALA_EXECUTABLE}
ARGS
${VALAC_COLORS}
--fast-vapi ${fast_vapi_file}
${vala_define_opts}
${ARGS_OPTIONS}
${VALAC_FLAGS}
${in_file}
DEPENDS
${in_file}
COMMENT
"Generating fast VAPI ${fast_vapi}"
)
endforeach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
foreach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
set(in_file "${CMAKE_CURRENT_SOURCE_DIR}/${src}")
string(REPLACE ".vala" ".c" c_code ${src})
string(REPLACE ".gs" ".c" c_code ${c_code})
string(REPLACE ".c" ".vapi" fast_vapi ${c_code})
set(my_fast_vapi_file "${DIRECTORY}/${fast_vapi}")
set(c_code_file "${DIRECTORY}/${c_code}")
set(fast_vapi_flags "")
set(fast_vapi_stamp "")
foreach(fast_vapi_file ${fast_vapi_files})
if(NOT "${fast_vapi_file}" STREQUAL "${my_fast_vapi_file}")
list(APPEND fast_vapi_flags --use-fast-vapi "${fast_vapi_file}")
list(APPEND fast_vapi_stamp "${fast_vapi_file}")
endif()
endforeach(fast_vapi_file)
_vala_mkdir_for_file("${fast_vapi_file}")
get_filename_component(dir "${c_code_file}" DIRECTORY)
add_custom_command(OUTPUT ${c_code_file}
COMMAND
${VALA_EXECUTABLE}
ARGS
${VALAC_COLORS}
"-C"
"-d" ${dir}
${vala_pkg_opts}
${vala_define_opts}
${gresources_args}
${ARGS_OPTIONS}
${VALAC_FLAGS}
${fast_vapi_flags}
${in_file}
${custom_vapi_arguments}
DEPENDS
${fast_vapi_stamp}
${in_file}
${ARGS_CUSTOM_VAPIS}
${ARGS_GRESOURCES}
COMMENT
"Generating C source ${c_code}"
)
endforeach(src)
if(NOT "${out_extra_files}" STREQUAL "")
add_custom_command(OUTPUT ${out_extra_files}
COMMAND
${VALA_EXECUTABLE}
ARGS
${VALAC_COLORS}
-C -q --disable-warnings
${header_arguments}
${vapi_arguments}
"-b" ${CMAKE_CURRENT_SOURCE_DIR}
"-d" ${DIRECTORY}
${vala_pkg_opts}
${vala_define_opts}
${gresources_args}
${ARGS_OPTIONS}
${VALAC_FLAGS}
${in_files}
${custom_vapi_arguments}
DEPENDS
${in_files}
${ARGS_CUSTOM_VAPIS}
${ARGS_GRESOURCES}
COMMENT
"Generating VAPI and headers for target ${output}"
)
endif()
else(ARGS_FAST_VAPI)
foreach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
set(in_file "${CMAKE_CURRENT_SOURCE_DIR}/${src}")
list(APPEND in_files "${in_file}")
string(REPLACE ".vala" ".c" src ${src})
string(REPLACE ".gs" ".c" src ${src})
list(APPEND out_files "${DIRECTORY}/${src}")
_vala_mkdir_for_file("${fast_vapi_file}")
endforeach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
add_custom_command(OUTPUT ${out_files} ${out_extra_files}
COMMAND
${VALA_EXECUTABLE}
ARGS
${VALAC_COLORS}
-C
${header_arguments}
${vapi_arguments}
"-b" ${CMAKE_CURRENT_SOURCE_DIR}
"-d" ${DIRECTORY}
${vala_pkg_opts}
${vala_define_opts}
${gresources_args}
${ARGS_OPTIONS}
${VALAC_FLAGS}
${in_files}
${custom_vapi_arguments}
DEPENDS
${in_files}
${ARGS_CUSTOM_VAPIS}
${ARGS_GRESOURCES}
COMMENT
"Generating C code for target ${output}"
)
endif(ARGS_FAST_VAPI)
set(${output} ${out_files} PARENT_SCOPE)
endfunction(vala_precompile)

View File

@ -0,0 +1,21 @@
if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
endif(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
string(REGEX REPLACE "\n" ";" files "${files}")
foreach(file ${files})
message(STATUS "Uninstalling: $ENV{DESTDIR}${file}")
if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
exec_program(
"@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
OUTPUT_VARIABLE rm_out
RETURN_VALUE rm_retval
)
if(NOT "${rm_retval}" STREQUAL 0)
message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
endif(NOT "${rm_retval}" STREQUAL 0)
else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
endif(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
endforeach(file)
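
This template is the standard CMake uninstall recipe; a sketch of how a top-level CMakeLists.txt typically wires it up (the template path and target name are assumed):

    configure_file(
        "${CMAKE_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
        "${CMAKE_BINARY_DIR}/cmake_uninstall.cmake"
        IMMEDIATE @ONLY)
    add_custom_target(uninstall
        COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/cmake_uninstall.cmake)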

298
configure vendored Executable file
View File

@ -0,0 +1,298 @@
#!/bin/sh
OPTS=`getopt -o "h" --long \
help,fetch-only,no-debug,disable-fast-vapi,with-tests,release,with-libsignal-in-tree,with-libsoup3,\
enable-plugin:,disable-plugin:,\
prefix:,program-prefix:,exec-prefix:,lib-suffix:,\
bindir:,libdir:,includedir:,datadir:,\
host:,build:,\
sbindir:,sysconfdir:,libexecdir:,localstatedir:,sharedstatedir:,mandir:,infodir:,\
-n './configure' -- "$@"`
if [ $? != 0 ] ; then echo "-- Ignoring unrecognized options." >&2 ; fi
eval set -- "$OPTS"
PREFIX=${PREFIX:-/usr/local}
ENABLED_PLUGINS=
DISABLED_PLUGINS=
BUILD_LIBSIGNAL_IN_TREE=
BUILD_TESTS=
BUILD_TYPE=Debug
DISABLE_FAST_VAPI=
LIB_SUFFIX=
NO_DEBUG=
FETCH_ONLY=
USE_SOUP3=
EXEC_PREFIX=
BINDIR=
SBINDIR=
SYSCONFDIR=
DATADIR=
INCLUDEDIR=
LIBDIR=
LIBEXECDIR=
LOCALSTATEDIR=
SHAREDSTATEDIR=
MANDIR=
INFODIR=
help() {
cat << EOF
Usage:
./configure [OPTION]...
Defaults for the options (based on current environment) are specified in
brackets.
Configuration:
-h, --help Print this help and exit
--disable-fast-vapi Disable the usage of the Vala compiler's fast-vapi
feature. fast-vapi mode is slower when doing
clean builds, but faster when doing incremental
builds (during development).
--fetch-only Only fetch the files needed so that ./configure can later
run without network access, then exit.
--no-debug Build without debug symbols
--release Configure to build an optimized release version
--with-libsignal-in-tree Build libsignal-protocol-c in tree and link it
statically.
--with-libsoup3 Build with libsoup-3.0
--with-tests Also build tests.
Plugin configuration:
--enable-plugin=PLUGIN Enable compilation of plugin PLUGIN.
--disable-plugin=PLUGIN Disable compilation of plugin PLUGIN.
Installation directories:
--prefix=PREFIX Install architecture-independent files in PREFIX
[$PREFIX]
--program-prefix=PREFIX Same as --prefix
--exec-prefix=EPREFIX Install architecture-dependent files in EPREFIX
[PREFIX]
--lib-suffix=SUFFIX Append SUFFIX to the directory name for libraries
By default, \`make install' will install all the files in
\`/usr/local/bin', \`/usr/local/lib' etc. You can specify
an installation prefix other than \`/usr/local' using \`--prefix',
for instance \`--prefix=\$HOME'.
For better control, use the options below.
Fine tuning of the installation directories:
--bindir=DIR user executables [EPREFIX/bin]
--libdir=DIR object code libraries [EPREFIX/lib]
--includedir=DIR C header files [PREFIX/include]
--datadir=DIR read-only data [PREFIX/share]
For compatibility with autotools, these options will be silently ignored:
--host, --build, --sbindir, --sysconfdir, --libexecdir, --sharedstatedir,
--localstatedir, --mandir, --infodir
Some influential environment variables:
CC C compiler command
CFLAGS C compiler flags
PKG_CONFIG_PATH directories to add to pkg-config's search path
PKG_CONFIG_LIBDIR path overriding pkg-config's built-in search path
USE_CCACHE whether to use ccache when compiling C objects
VALAC Vala compiler command
VALACFLAGS Vala compiler flags
Use these variables to override the choices made by \`configure' or to help
it to find libraries and programs with nonstandard names/locations.
EOF
}
while true; do
case "$1" in
--prefix ) PREFIX="$2"; shift; shift ;;
--enable-plugin ) if [ -z "$ENABLED_PLUGINS" ]; then ENABLED_PLUGINS="$2"; else ENABLED_PLUGINS="$ENABLED_PLUGINS;$2"; fi; shift; shift ;;
--disable-plugin ) if [ -z "$DISABLED_PLUGINS" ]; then DISABLED_PLUGINS="$2"; else DISABLED_PLUGINS="$DISABLED_PLUGINS;$2"; fi; shift; shift ;;
--valac ) VALA_EXECUTABLE="$2"; shift; shift ;;
--valac-flags ) VALAC_FLAGS="$2"; shift; shift ;;
--lib-suffix ) LIB_SUFFIX="$2"; shift; shift ;;
--with-libsignal-in-tree ) BUILD_LIBSIGNAL_IN_TREE=yes; shift ;;
--with-libsoup3 ) USE_SOUP3=yes; shift ;;
--disable-fast-vapi ) DISABLE_FAST_VAPI=yes; shift ;;
--no-debug ) NO_DEBUG=yes; shift ;;
--fetch-only ) FETCH_ONLY=yes; shift ;;
--release ) BUILD_TYPE=RelWithDebInfo; shift ;;
--with-tests ) BUILD_TESTS=yes; shift ;;
# Autotools paths
--program-prefix ) PREFIX="$2"; shift; shift ;;
--exec-prefix ) EXEC_PREFIX="$2"; shift; shift ;;
--bindir ) BINDIR="$2"; shift; shift ;;
--datadir ) DATADIR="$2"; shift; shift ;;
--includedir ) INCLUDEDIR="$2"; shift; shift ;;
--libdir ) LIBDIR="$2"; shift; shift ;;
# Autotools paths not used
--sbindir ) SBINDIR="$2"; shift; shift ;;
--sysconfdir ) SYSCONFDIR="$2"; shift; shift ;;
--libexecdir ) LIBEXECDIR="$2"; shift; shift ;;
--localstatedir ) LOCALSTATEDIR="$2"; shift; shift ;;
--sharedstatedir ) SHAREDSTATEDIR="$2"; shift; shift ;;
--mandir ) MANDIR="$2"; shift; shift ;;
--infodir ) INFODIR="$2"; shift; shift ;;
--host | --build ) shift; shift ;;
-h | --help ) help; exit 0 ;;
-- ) shift; break ;;
* ) break ;;
esac
done
if [ "$BUILD_LIBSIGNAL_IN_TREE" = "yes" ] || [ "$FETCH_ONLY" = "yes" ]; then
if [ -d ".git" ]; then
git submodule update --init 2>/dev/null
else
tmp=0
for i in $(cat .gitmodules | grep -n submodule | awk -F ':' '{print $1}') $(wc -l .gitmodules | awk '{print $1}'); do
if ! [ $tmp -eq 0 ]; then
name=$(cat .gitmodules | head -n $tmp | tail -n 1 | awk -F '"' '{print $2}')
def=$(cat .gitmodules | head -n $i | tail -n $(expr "$i" - "$tmp") | awk -F ' ' '{print $1 $2 $3}')
path=$(echo "$def" | grep '^path=' | awk -F '=' '{print $2}')
url=$(echo "$def" | grep '^url=' | awk -F '=' '{print $2}')
branch=$(echo "$def" | grep '^branch=' | awk -F '=' '{print $2}')
if ! ls "$path"/* >/dev/null 2>/dev/null; then
git=$(which git)
if ! [ $? -eq 0 ] || ! [ -x $git ]; then
echo "Failed retrieving missing files"
exit 5
fi
res=$(git clone "$url" "$path" 2>&1)
if ! [ $? -eq 0 ] || ! [ -d $path ]; then
echo "Failed retrieving missing files: $res"
exit 5
fi
if [ -n "$branch" ]; then
olddir="$(pwd)"
cd "$path"
res=$(git checkout "$branch" 2>&1)
if ! [ $? -eq 0 ]; then
echo "Failed retrieving missing files: $res"
exit 5
fi
cd "$olddir"
fi
echo "Submodule path '$path': checked out '$branch' (via git clone)"
fi
fi
tmp=$i
done
fi
fi
if [ "$FETCH_ONLY" = "yes" ]; then exit 0; fi
if [ ! -x "$(which cmake 2>/dev/null)" ]
then
echo "-!- CMake required."
exit 1
fi
ninja_bin="$(which ninja-build 2>/dev/null)"
if ! [ -x "$ninja_bin" ]; then
ninja_bin="$(which ninja 2>/dev/null)"
fi
if [ -x "$ninja_bin" ]; then
ninja_version=`$ninja_bin --version 2>/dev/null`
if [ $? -eq 0 ]; then
if [ -d build ]; then
last_ninja_version=`cat build/.ninja_version 2>/dev/null`
else
last_ninja_version=0
fi
if [ "$ninja_version" != "$last_ninja_version" ]; then
echo "-- Found Ninja: $ninja_bin (found version \"$ninja_version\")"
fi
cmake_type="Ninja"
exec_bin="$ninja_bin"
exec_command="$exec_bin"
elif [ "/usr/sbin/ninja" = "$ninja_bin" ]; then
echo "-- Ninja at $ninja_bin is not usable. Did you install 'ninja' instead of 'ninja-build'?"
fi
fi
if ! [ -x "$exec_bin" ]; then
make_bin="$(which make 2>/dev/null)"
if [ -x "$make_bin" ]; then
echo "-- Found Make: $make_bin"
cmake_type="Unix Makefiles"
exec_bin="$make_bin"
exec_command="$exec_bin"
echo "-- Running with make. Using Ninja (ninja-build) might improve build experience."
fi
fi
if ! [ -x "$exec_bin" ]; then
echo "-!- No compatible build system (Ninja, Make) found."
exit 4
fi
if [ -f ./build ]; then
echo "-!- ./build file exists. ./configure can't continue"
exit 2
fi
if [ -d build ]; then
last_type=`cat build/.cmake_type`
if [ "$cmake_type" != "$last_type" ]
then
echo "-- Using different build system, cleaning build system files"
cd build
rm -r CMakeCache.txt CMakeFiles
cd ..
fi
fi
mkdir -p build
cd build
echo "$cmake_type" > .cmake_type
echo "$ninja_version" > .ninja_version
cmake -G "$cmake_type" \
-DCMAKE_INSTALL_PREFIX="$PREFIX" \
-DCMAKE_BUILD_TYPE="$BUILD_TYPE" \
-DENABLED_PLUGINS="$ENABLED_PLUGINS" \
-DDISABLED_PLUGINS="$DISABLED_PLUGINS" \
-DBUILD_TESTS="$BUILD_TESTS" \
-DBUILD_LIBSIGNAL_IN_TREE="$BUILD_LIBSIGNAL_IN_TREE" \
-DUSE_SOUP3="$USE_SOUP3" \
-DVALA_EXECUTABLE="$VALAC" \
-DCMAKE_VALA_FLAGS="$VALACFLAGS" \
-DDISABLE_FAST_VAPI="$DISABLE_FAST_VAPI" \
-DLIB_SUFFIX="$LIB_SUFFIX" \
-DNO_DEBUG="$NO_DEBUG" \
-DEXEC_INSTALL_PREFIX="$EXEC_PREFIX" \
-DSHARE_INSTALL_PREFIX="$DATADIR" \
-DBIN_INSTALL_DIR="$BINDIR" \
-DINCLUDE_INSTALL_DIR="$INCLUDEDIR" \
-DLIB_INSTALL_DIR="$LIBDIR" \
-Wno-dev \
.. || exit 9
if [ "$cmake_type" = "Ninja" ]; then
cat << EOF > Makefile
default:
@sh -c "$exec_command"
%:
@sh -c "$exec_command \"\$@\""
EOF
fi
cd ..
cat << EOF > Makefile
default:
@sh -c "cd build; $exec_command"
distclean: clean uninstall
test: default
echo "make test not yet supported"
%:
@sh -c "cd build; $exec_command \"\$@\""
EOF
echo "-- Configured. Type 'make' to build, 'make install' to install."

View File

@ -0,0 +1,41 @@
find_package(GCrypt REQUIRED)
find_package(Srtp2 REQUIRED)
find_packages(CRYPTO_VALA_PACKAGES REQUIRED
GLib
GObject
GIO
)
vala_precompile(CRYPTO_VALA_C
SOURCES
"src/cipher.vala"
"src/cipher_converter.vala"
"src/error.vala"
"src/random.vala"
"src/srtp.vala"
CUSTOM_VAPIS
"${CMAKE_CURRENT_SOURCE_DIR}/vapi/gcrypt.vapi"
"${CMAKE_CURRENT_SOURCE_DIR}/vapi/libsrtp2.vapi"
PACKAGES
${CRYPTO_VALA_PACKAGES}
GENERATE_VAPI
crypto-vala
GENERATE_HEADER
crypto-vala
)
add_custom_target(crypto-vala-vapi
DEPENDS
${CMAKE_BINARY_DIR}/exports/crypto-vala.vapi
${CMAKE_BINARY_DIR}/exports/crypto-vala.deps
)
add_definitions(${VALA_CFLAGS} -DG_LOG_DOMAIN="crypto-vala")
add_library(crypto-vala SHARED ${CRYPTO_VALA_C})
add_dependencies(crypto-vala crypto-vala-vapi)
target_link_libraries(crypto-vala ${CRYPTO_VALA_PACKAGES} gcrypt libsrtp2)
set_target_properties(crypto-vala PROPERTIES VERSION 0.0 SOVERSION 0)
install(TARGETS crypto-vala ${TARGET_INSTALL})
install(FILES ${CMAKE_BINARY_DIR}/exports/crypto-vala.vapi ${CMAKE_BINARY_DIR}/exports/crypto-vala.deps DESTINATION ${VAPI_INSTALL_DIR})
install(FILES ${CMAKE_BINARY_DIR}/exports/crypto-vala.h DESTINATION ${INCLUDE_INSTALL_DIR})

View File

@ -1,2 +0,0 @@
gio-2.0
glib-2.0

View File

@ -1,23 +0,0 @@
dependencies = [
dep_gio,
dep_glib,
dep_libgcrypt,
dep_libsrtp2,
]
sources = files(
'src/cipher.vala',
'src/cipher_converter.vala',
'src/error.vala',
'src/random.vala',
'src/srtp.vala',
)
c_args = [
'-DG_LOG_DOMAIN="crypto-vala"',
]
vala_args = [
'--vapidir', meson.current_source_dir() / 'vapi',
]
lib_crypto_vala = library('crypto-vala', sources, c_args: c_args, vala_args: vala_args, dependencies: dependencies, version: '0.0', install: true, install_dir: [true, true, true], install_rpath: default_install_rpath)
dep_crypto_vala = declare_dependency(link_with: lib_crypto_vala, include_directories: include_directories('.'))
install_data('crypto-vala.deps', install_dir: get_option('datadir') / 'vala/vapi', install_tag: 'devel') # TODO: workaround for https://github.com/mesonbuild/meson/issues/9756

View File

@ -55,7 +55,7 @@ public class SymmetricCipherEncrypter : SymmetricCipherConverter {
}
return ConverterResult.CONVERTED;
} catch (Crypto.Error e) {
throw new IOError.FAILED(@"$(e.domain) error while encrypting: $(e.message)");
throw new IOError.FAILED(@"$(e.domain) error while decrypting: $(e.message)");
}
}
}

194
dino.doap
View File

@ -3,7 +3,7 @@
<Project>
<name>Dino</name>
<short-name>dino</short-name>
<shortdesc xml:lang="en">Modern XMPP chat client</shortdesc>
<shortdesc xml:lang="en">Modern XMPP Chat Client</shortdesc>
<shortdesc xml:lang="zh-TW">現代化的 XMPP 用戶端聊天軟件</shortdesc>
<shortdesc xml:lang="zh-CN">现代 XMPP 聊天客户端</shortdesc>
<shortdesc xml:lang="tr">Modern XMPP Sohbet İstemcisi</shortdesc>
@ -227,28 +227,24 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0004.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0027.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0030.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0045.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -256,7 +252,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0047.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:note>For use with XEP-0261</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -264,14 +259,12 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0048.html"/>
<xmpp:status>deprecated</xmpp:status>
<xmpp:note>Migrating to XEP-0402 if supported by server</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0049.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -279,7 +272,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0054.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for viewing avatars</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -287,14 +279,12 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0059.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0313</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0060.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -302,7 +292,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0065.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0260</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -310,49 +299,42 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0066.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:note>For file transfers using XEP-0363</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0077.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0082.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0084.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0085.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0115.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0153.html"/>
<xmpp:status>deprecated</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Only to fetch Avatars from other users</xmpp:note>
</xmpp:SupportedXep>
</implements>
@ -360,98 +342,78 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0163.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0166.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0167.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0176.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0177.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0184.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0191.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0198.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0199.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0203.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0215.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0222.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0223.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0234.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -459,7 +421,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0245.html"/>
<xmpp:version>1.0</xmpp:version>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -467,70 +428,43 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0249.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for sending</xmpp:note>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0260.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.3</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0261.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0272.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0280.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0293.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0294.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0297.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0298.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
<xmpp:status>complete</xmpp:status>
<xmpp:note>For use with XEP-0280</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
@ -545,7 +479,7 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0313.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Not for MUCs</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
@ -553,21 +487,18 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0320.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0333.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0334.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -575,37 +506,24 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0353.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.1</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0359.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0363.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0367.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0368.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -613,7 +531,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0380.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for outgoing messages</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -621,121 +538,42 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0384.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0391.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0392.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0393.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0394.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0396.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0398.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0402.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.2.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0410.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.2</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0421.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0426.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0428.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.1</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0444.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.1</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0446.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0447.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0453.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
@ -743,30 +581,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0454.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for embedded thumbnails</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0461.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0482.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0486.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
</Project>

View File

@ -9,7 +9,7 @@
<name>Dino</name>
<short-name>dino</short-name>
<shortdesc xml:lang="en">Modern XMPP chat client</shortdesc>
<shortdesc xml:lang="en">Modern XMPP Chat Client</shortdesc>
<description xml:lang="en">
Dino is a modern open-source chat client for the desktop. It focuses on providing a clean and reliable Jabber/XMPP experience while keeping your privacy in mind.
It supports end-to-end encryption with OMEMO and OpenPGP and allows configuring privacy-related features such as read receipts and typing notifications.
@ -47,28 +47,24 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0004.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0027.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0030.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0045.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -76,7 +72,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0047.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:note>For use with XEP-0261</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -84,14 +79,12 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0048.html" />
<xmpp:status>deprecated</xmpp:status>
<xmpp:note>Migrating to XEP-0402 if supported by server</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0049.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -99,7 +92,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0054.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for viewing avatars</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -107,14 +99,12 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0059.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0313</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0060.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -122,7 +112,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0065.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0260</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -130,49 +119,42 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0066.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:note>For file transfers using XEP-0363</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0077.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0082.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0084.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0085.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0115.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0153.html" />
<xmpp:status>deprecated</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Only to fetch Avatars from other users</xmpp:note>
</xmpp:SupportedXep>
</implements>
@ -180,98 +162,78 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0163.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0166.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0167.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0176.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0177.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0184.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0191.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0198.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0199.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0203.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0215.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0222.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0223.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0234.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -279,7 +241,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0245.html" />
<xmpp:version>1.0</xmpp:version>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -287,70 +248,43 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0249.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for sending</xmpp:note>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0260.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.3</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0261.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0272.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0280.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0293.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0294.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0297.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0298.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
<xmpp:status>complete</xmpp:status>
<xmpp:note>For use with XEP-0280</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
@ -365,7 +299,7 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0313.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Not for MUCs</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
@ -373,21 +307,18 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0320.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0333.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0334.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -395,37 +326,24 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0353.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.1</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0359.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0363.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0367.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0368.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -433,7 +351,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0380.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for outgoing messages</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -441,121 +358,42 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0384.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0391.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0392.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0393.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0394.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0396.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0398.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0402.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.2.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0410.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.2</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0421.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0426.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0428.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.1</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0444.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.1</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0446.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0447.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0453.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
@ -563,30 +401,6 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0454.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for embedded thumbnails</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0461.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0482.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0486.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
</Project>

View File

@ -1,133 +0,0 @@
{
"id": "im.dino.Dino",
"runtime": "org.gnome.Platform",
"runtime-version": "48",
"sdk": "org.gnome.Sdk",
"command": "dino",
"finish-args": [
"--share=ipc",
"--socket=fallback-x11",
"--socket=wayland",
"--socket=pulseaudio",
"--socket=gpg-agent",
"--filesystem=xdg-run/pipewire-0",
"--share=network",
"--device=dri",
"--talk-name=org.freedesktop.Notifications"
],
"modules": [
{
"name": "protobuf",
"buildsystem": "cmake-ninja",
"cleanup": [
"*"
],
"config-opts": [
"-Dprotobuf_BUILD_TESTS=OFF",
"-Dprotobuf_BUILD_LIBUPB=OFF"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/protocolbuffers/protobuf/releases/download/v30.2/protobuf-30.2.tar.gz",
"sha512": "555d1b18d175eeaf17f3879f124d33080f490367840d35b34bfc4e4a5b383bf6a1d09f1570acb6af9c53ac4940a14572d46423b6e3dd0c712e7802c986fb6be6",
"x-checker-data": {
"type": "anitya",
"project-id": 3715,
"stable-only": true,
"url-template": "https://github.com/protocolbuffers/protobuf/releases/download/v$version/protobuf-$version.tar.gz"
}
}
]
},
{
"name": "libprotobuf-c",
"buildsystem": "autotools",
"config-opts": [
"CFLAGS=-fPIC"
],
"post-install": [
"rm /app/lib/*.so"
],
"cleanup": [
"*"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/protobuf-c/protobuf-c/releases/download/v1.5.2/protobuf-c-1.5.2.tar.gz",
"sha512": "78dc72988d7e8232c1b967849aa00939bc05ab7d39b86a8e2af005e38aa4ef4c9b03920d51fb5337399d980e65f35d11bd4742bea745a893ecc909f56a51c9ac",
"x-checker-data": {
"type": "anitya",
"project-id": 3716,
"stable-only": true,
"url-template": "https://github.com/protobuf-c/protobuf-c/releases/download/v$version/protobuf-c-$version.tar.gz"
}
}
]
},
{
"name": "libomemo-c",
"buildsystem": "meson",
"cleanup": [
"/lib/pkgconfig",
"/include"
],
"config-opts": [
"-Dtests=false",
"-Ddefault_library=static"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/dino/libomemo-c/releases/download/v0.5.1/libomemo-c-0.5.1.tar.gz",
"sha512": "ff59565406c51663f2944e9a7c12c5b0e3fa01073039f5161472dd81f59194b1cf2685bc1e0cc930a141bc409b965c5d93313cfc3e0e237250102af3b5e88826",
"x-checker-data": {
"type": "anitya",
"project-id": 359676,
"stable-only": true,
"url-template": "https://github.com/dino/libomemo-c/releases/download/v$version/libomemo-c-$version.tar.gz"
}
}
]
},
{
"name": "qrencode",
"buildsystem": "cmake-ninja",
"cleanup": [
"*"
],
"config-opts": [
"-DCMAKE_C_FLAGS=-fPIC"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/fukuchi/libqrencode/archive/refs/tags/v4.1.1.tar.gz",
"sha512": "584106e7bcaaa1ef2efe63d653daad38d4ff436eb4b185a1db3c747169c1ffa74149c3b1329bb0b8ae007903db0a7034aabf135cc196d91a37b5c61348154a65",
"x-checker-data": {
"type": "anitya",
"project-id": 12834,
"stable-only": true,
"url-template": "https://github.com/fukuchi/libqrencode/archive/refs/tags/v$version.tar.gz"
}
}
]
},
{
"name": "dino",
"buildsystem": "meson",
"builddir": true,
"cleanup": [
"/include",
"/share/vala"
],
"sources": [
{
"type": "dir",
"path": "."
}
]
}
]
}

libdino/CMakeLists.txt Normal file
View File

@ -0,0 +1,125 @@
find_packages(LIBDINO_PACKAGES REQUIRED
GDKPixbuf2
Gee
GLib
GModule
GObject
)
vala_precompile(LIBDINO_VALA_C
SOURCES
src/application.vala
src/dbus/login1.vala
src/dbus/notifications.vala
src/dbus/upower.vala
src/entity/account.vala
src/entity/call.vala
src/entity/conversation.vala
src/entity/encryption.vala
src/entity/file_transfer.vala
src/entity/message.vala
src/entity/settings.vala
src/plugin/interfaces.vala
src/plugin/loader.vala
src/plugin/registry.vala
src/service/avatar_manager.vala
src/service/blocking_manager.vala
src/service/call_store.vala
src/service/call_state.vala
src/service/call_peer_state.vala
src/service/calls.vala
src/service/chat_interaction.vala
src/service/connection_manager.vala
src/service/content_item_store.vala
src/service/conversation_manager.vala
src/service/counterpart_interaction_manager.vala
src/service/database.vala
src/service/entity_capabilities_storage.vala
src/service/entity_info.vala
src/service/fallback_body.vala
src/service/file_manager.vala
src/service/file_transfer_storage.vala
src/service/history_sync.vala
src/service/jingle_file_transfers.vala
src/service/message_correction.vala
src/service/message_processor.vala
src/service/message_storage.vala
src/service/module_manager.vala
src/service/muc_manager.vala
src/service/notification_events.vala
src/service/presence_manager.vala
src/service/replies.vala
src/service/reactions.vala
src/service/registration.vala
src/service/roster_manager.vala
src/service/search_processor.vala
src/service/stream_interactor.vala
src/service/util.vala
src/util/display_name.vala
src/util/util.vala
src/util/weak_map.vala
CUSTOM_VAPIS
"${CMAKE_BINARY_DIR}/exports/xmpp-vala.vapi"
"${CMAKE_BINARY_DIR}/exports/qlite.vapi"
CUSTOM_DEPS
xmpp-vala
qlite
PACKAGES
${LIBDINO_PACKAGES}
GENERATE_VAPI
dino
GENERATE_HEADER
dino
)
add_custom_command(OUTPUT "${CMAKE_BINARY_DIR}/exports/dino_i18n.h"
COMMAND
cp "${CMAKE_CURRENT_SOURCE_DIR}/src/dino_i18n.h" "${CMAKE_BINARY_DIR}/exports/dino_i18n.h"
DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/src/dino_i18n.h"
COMMENT
Copy header file dino_i18n.h
)
add_custom_target(dino-vapi
DEPENDS
${CMAKE_BINARY_DIR}/exports/dino.vapi
${CMAKE_BINARY_DIR}/exports/dino.deps
${CMAKE_BINARY_DIR}/exports/dino_i18n.h
)
add_definitions(${VALA_CFLAGS} -DDINO_SYSTEM_PLUGIN_DIR="${PLUGIN_INSTALL_DIR}" -DDINO_SYSTEM_LIBDIR_NAME="${LIBDIR_NAME}" -DG_LOG_DOMAIN="libdino" -DDINO_VERSION=\"${PROJECT_VERSION}\")
add_library(libdino SHARED ${LIBDINO_VALA_C} ${CMAKE_BINARY_DIR}/exports/dino_i18n.h)
add_dependencies(libdino dino-vapi)
target_link_libraries(libdino xmpp-vala qlite ${LIBDINO_PACKAGES} m)
set_target_properties(libdino PROPERTIES PREFIX "" VERSION 0.0 SOVERSION 0)
install(TARGETS libdino ${TARGET_INSTALL})
install(FILES ${CMAKE_BINARY_DIR}/exports/dino.vapi ${CMAKE_BINARY_DIR}/exports/dino.deps DESTINATION ${VAPI_INSTALL_DIR})
install(FILES ${CMAKE_BINARY_DIR}/exports/dino.h ${CMAKE_BINARY_DIR}/exports/dino_i18n.h DESTINATION ${INCLUDE_INSTALL_DIR})
if(BUILD_TESTS)
vala_precompile(LIBDINO_TEST_VALA_C
SOURCES
"tests/weak_map.vala"
"tests/testcase.vala"
"tests/common.vala"
CUSTOM_VAPIS
${CMAKE_BINARY_DIR}/exports/dino_internal.vapi
${CMAKE_BINARY_DIR}/exports/xmpp-vala.vapi
${CMAKE_BINARY_DIR}/exports/qlite.vapi
PACKAGES
${LIBDINO_PACKAGES}
OPTIONS
${LIBDINO_EXTRA_OPTIONS}
)
add_definitions(${VALA_CFLAGS})
add_executable(libdino-test ${LIBDINO_TEST_VALA_C})
target_link_libraries(libdino-test libdino)
endif(BUILD_TESTS)

View File

@ -1,6 +0,0 @@
gdk-pixbuf-2.0
gee-0.8
glib-2.0
gmodule-2.0
qlite
xmpp-vala

View File

@ -1,92 +0,0 @@
# version_vala
dot_git = meson.current_source_dir() / '../.git'
version_file = meson.current_source_dir() / '../VERSION'
command = [prog_python, files('version.py'), version_file, '--git-repo', meson.current_source_dir()]
if prog_git.found()
command += ['--git', prog_git]
endif
version_vala = vcs_tag(command: command, input: 'src/version.vala.in', output: 'version.vala', replace_string: '%VERSION%')
# libdino
dependencies = [
dep_gdk_pixbuf,
dep_gee,
dep_gio,
dep_glib,
dep_gmodule,
dep_qlite,
dep_xmpp_vala
]
sources = files(
'src/application.vala',
'src/dbus/login1.vala',
'src/dbus/notifications.vala',
'src/dbus/upower.vala',
'src/entity/account.vala',
'src/entity/call.vala',
'src/entity/conversation.vala',
'src/entity/encryption.vala',
'src/entity/file_transfer.vala',
'src/entity/message.vala',
'src/entity/settings.vala',
'src/plugin/interfaces.vala',
'src/plugin/loader.vala',
'src/plugin/registry.vala',
'src/service/avatar_manager.vala',
'src/service/blocking_manager.vala',
'src/service/call_store.vala',
'src/service/call_state.vala',
'src/service/call_peer_state.vala',
'src/service/calls.vala',
'src/service/chat_interaction.vala',
'src/service/connection_manager.vala',
'src/service/contact_model.vala',
'src/service/content_item_store.vala',
'src/service/conversation_manager.vala',
'src/service/counterpart_interaction_manager.vala',
'src/service/database.vala',
'src/service/entity_capabilities_storage.vala',
'src/service/entity_info.vala',
'src/service/fallback_body.vala',
'src/service/file_manager.vala',
'src/service/file_transfer_storage.vala',
'src/service/history_sync.vala',
'src/service/jingle_file_transfers.vala',
'src/service/message_correction.vala',
'src/service/message_processor.vala',
'src/service/message_storage.vala',
'src/service/module_manager.vala',
'src/service/muc_manager.vala',
'src/service/notification_events.vala',
'src/service/presence_manager.vala',
'src/service/replies.vala',
'src/service/reactions.vala',
'src/service/registration.vala',
'src/service/roster_manager.vala',
'src/service/search_processor.vala',
'src/service/sfs_metadata.vala',
'src/service/stateless_file_sharing.vala',
'src/service/stream_interactor.vala',
'src/service/util.vala',
'src/util/display_name.vala',
'src/util/limit_input_stream.vala',
'src/util/send_message.vala',
'src/util/util.vala',
'src/util/weak_map.vala',
'src/util/weak_timeout.vala',
)
sources += [version_vala]
c_args = [
'-DDINO_SYSTEM_LIBDIR_NAME="@0@"'.format(get_option('prefix') / get_option('libdir')),
'-DDINO_SYSTEM_PLUGIN_DIR="@0@"'.format(get_option('prefix') / get_option('libdir') / get_option('plugindir')),
'-DG_LOG_DOMAIN="libdino"',
]
vala_args = []
if meson.get_compiler('vala').version().version_compare('=0.56.11')
vala_args += ['-D', 'VALA_0_56_11']
endif
lib_dino = library('libdino', sources, c_args: c_args, vala_args: vala_args, include_directories: include_directories('src'), dependencies: dependencies, name_prefix: '', version: '0.0', install: true, install_dir: [true, true, true], install_rpath: default_install_rpath)
dep_dino = declare_dependency(link_with: lib_dino, include_directories: include_directories('.', 'src'))
install_data('dino.deps', install_dir: get_option('datadir') / 'vala/vapi', install_tag: 'devel') # TODO: workaround for https://github.com/mesonbuild/meson/issues/9756
install_headers('src/dino_i18n.h')

View File

@ -2,6 +2,7 @@ using Dino.Entities;
namespace Dino {
extern const string VERSION;
public string get_version() { return VERSION; }
public string get_short_version() {
if (!VERSION.contains("~")) return VERSION;
@ -39,12 +40,12 @@ public interface Application : GLib.Application {
PresenceManager.start(stream_interactor);
CounterpartInteractionManager.start(stream_interactor);
BlockingManager.start(stream_interactor);
Calls.start(stream_interactor, db);
ConversationManager.start(stream_interactor, db);
MucManager.start(stream_interactor);
AvatarManager.start(stream_interactor, db);
RosterManager.start(stream_interactor, db);
FileManager.start(stream_interactor, db);
Calls.start(stream_interactor, db);
CallStore.start(stream_interactor, db);
ContentItemStore.start(stream_interactor, db);
ChatInteraction.start(stream_interactor);
@ -57,8 +58,6 @@ public interface Application : GLib.Application {
Reactions.start(stream_interactor, db);
Replies.start(stream_interactor, db);
FallbackBody.start(stream_interactor, db);
ContactModels.start(stream_interactor);
StatelessFileSharing.start(stream_interactor, db);
create_actions();

View File

@ -8,10 +8,7 @@ public class Account : Object {
public int id { get; set; }
public string localpart { get { return full_jid.localpart; } }
public string domainpart { get { return full_jid.domainpart; } }
public string resourcepart {
get { return full_jid.resourcepart; }
private set { full_jid.resourcepart = value; }
}
public string resourcepart { get { return full_jid.resourcepart;} }
public Jid bare_jid { owned get { return full_jid.bare_jid; } }
public Jid full_jid { get; private set; }
public string? password { get; set; }
@ -21,17 +18,28 @@ public class Account : Object {
public string? alias { get; set; }
public bool enabled { get; set; default = false; }
public string? roster_version { get; set; }
public DateTime mam_earliest_synced { get; set; default=new DateTime.from_unix_utc(0); }
private Database? db;
public Account(Jid bare_jid, string password) {
public Account(Jid bare_jid, string? resourcepart, string? password, string? alias) {
this.id = -1;
try {
this.full_jid = bare_jid.with_resource(get_random_resource());
} catch (InvalidJidError e) {
error("Auto-generated resource was invalid (%s)", e.message);
if (resourcepart != null) {
try {
this.full_jid = bare_jid.with_resource(resourcepart);
} catch (InvalidJidError e) {
warning("Tried to create account with invalid resource (%s), defaulting to auto generated", e.message);
}
}
if (this.full_jid == null) {
try {
this.full_jid = bare_jid.with_resource("dino." + Random.next_int().to_string("%x"));
} catch (InvalidJidError e) {
error("Auto-generated resource was invalid (%s)", e.message);
}
}
this.password = password;
this.alias = alias;
}
public Account.from_row(Database db, Qlite.Row row) throws InvalidJidError {
@ -42,6 +50,7 @@ public class Account : Object {
alias = row[db.account.alias];
enabled = row[db.account.enabled];
roster_version = row[db.account.roster_version];
mam_earliest_synced = new DateTime.from_unix_utc(row[db.account.mam_earliest_synced]);
notify.connect(on_update);
}
@ -57,6 +66,7 @@ public class Account : Object {
.value(db.account.alias, alias)
.value(db.account.enabled, enabled)
.value(db.account.roster_version, roster_version)
.value(db.account.mam_earliest_synced, (long)mam_earliest_synced.to_unix())
.perform();
notify.connect(on_update);
@ -69,14 +79,6 @@ public class Account : Object {
db = null;
}
public void set_random_resource() {
this.resourcepart = get_random_resource();
}
private static string get_random_resource() {
return "dino." + Random.next_int().to_string("%x");
}
public bool equals(Account acc) {
return equals_func(this, acc);
}
@ -104,6 +106,8 @@ public class Account : Object {
update.set(db.account.enabled, enabled); break;
case "roster-version":
update.set(db.account.roster_version, roster_version); break;
case "mam-earliest-synced":
update.set(db.account.mam_earliest_synced, (long)mam_earliest_synced.to_unix()); break;
}
update.perform();
}
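The hunk above interleaves two different Account constructors: on master the resource is always auto-generated and can be rotated later with set_random_resource(), while on v0.4.1 an optional resource and alias are passed in. A minimal usage sketch of both sides (only one of the two constructor calls exists in any given version), assuming the xmpp-vala Jid constructor, which may throw InvalidJidError, and illustrative values only:

void account_construction_sketch() {
    try {
        Jid bare = new Jid("alice@example.org");

        // master: the resource is auto-generated ("dino.<hex>") and can be rotated later
        var account = new Account(bare, "secret");
        account.set_random_resource();

        // v0.4.1: resource, password and alias are optional constructor arguments
        var old_style = new Account(bare, null, "secret", "Alice");
    } catch (InvalidJidError e) {
        warning("invalid JID: %s", e.message);
    }
}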

View File

@ -33,7 +33,7 @@ public class Conversation : Object {
}
}
}
public Encryption encryption { get; set; default = Encryption.UNKNOWN; }
public Encryption encryption { get; set; default = Encryption.NONE; }
public Message? read_up_to { get; set; }
public int read_up_to_item { get; set; default=-1; }
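The changed default is the only difference here, but it changes what a freshly created conversation means: master starts at Encryption.UNKNOWN, so callers can tell "not decided yet" apart from an explicit Encryption.NONE. A sketch, assuming the usual Conversation(Jid, Account, Type) constructor:

void conversation_default_sketch(Jid counterpart, Account account) {
    var conversation = new Conversation(counterpart, account, Conversation.Type.CHAT);
    // master:  conversation.encryption == Encryption.UNKNOWN (no decision has been made yet)
    // v0.4.1:  conversation.encryption == Encryption.NONE    (treated as unencrypted until changed)
}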

View File

@ -4,8 +4,6 @@ namespace Dino.Entities {
public class FileTransfer : Object {
public signal void sources_changed();
public const bool DIRECTION_SENT = true;
public const bool DIRECTION_RECEIVED = false;
@ -17,7 +15,6 @@ public class FileTransfer : Object {
}
public int id { get; set; default=-1; }
public string? file_sharing_id { get; set; }
public Account account { get; set; }
public Jid counterpart { get; set; }
public Jid ourpart { get; set; }
@ -67,52 +64,14 @@ public class FileTransfer : Object {
}
public string path { get; set; }
public string? mime_type { get; set; }
public int64 size { get; set; }
// TODO(hrxi): expand to 64 bit
public int size { get; set; default=-1; }
public State state { get; set; default=State.NOT_STARTED; }
public int provider { get; set; }
public string info { get; set; }
public Cancellable cancellable { get; default=new Cancellable(); }
// This value is not persisted
public int64 transferred_bytes { get; set; }
public Xep.FileMetadataElement.FileMetadata file_metadata {
owned get {
return new Xep.FileMetadataElement.FileMetadata() {
name = this.file_name,
mime_type = this.mime_type,
size = this.size,
desc = this.desc,
date = this.modification_date,
width = this.width,
height = this.height,
length = this.length,
hashes = this.hashes,
thumbnails = this.thumbnails
};
}
set {
this.file_name = value.name;
this.mime_type = value.mime_type;
this.size = value.size;
this.desc = value.desc;
this.modification_date = value.date;
this.width = value.width;
this.height = value.height;
this.length = value.length;
this.hashes = value.hashes;
this.thumbnails = value.thumbnails;
}
}
public string? desc { get; set; }
public DateTime? modification_date { get; set; }
public int width { get; set; default=-1; }
public int height { get; set; default=-1; }
public int64 length { get; set; default=-1; }
public Gee.List<Xep.CryptographicHashes.Hash> hashes = new Gee.ArrayList<Xep.CryptographicHashes.Hash>();
public Gee.List<Xep.StatelessFileSharing.Source> sfs_sources = new Gee.ArrayList<Xep.StatelessFileSharing.Source>(Xep.StatelessFileSharing.Source.equals_func);
public Gee.List<Xep.JingleContentThumbnails.Thumbnail> thumbnails = new Gee.ArrayList<Xep.JingleContentThumbnails.Thumbnail>();
private Database? db;
private string storage_dir;
@ -121,7 +80,6 @@ public class FileTransfer : Object {
this.storage_dir = storage_dir;
id = row[db.file_transfer.id];
file_sharing_id = row[db.file_transfer.file_sharing_id];
account = db.get_account_by_id(row[db.file_transfer.account_id]); // TODO: no need to construct the Account object anew here
counterpart = db.get_jid_by_id(row[db.file_transfer.counterpart_id]);
@ -141,37 +99,10 @@ public class FileTransfer : Object {
file_name = row[db.file_transfer.file_name];
path = row[db.file_transfer.path];
mime_type = row[db.file_transfer.mime_type];
size = (int64) row[db.file_transfer.size];
size = row[db.file_transfer.size];
state = (State) row[db.file_transfer.state];
provider = row[db.file_transfer.provider];
info = row[db.file_transfer.info];
modification_date = new DateTime.from_unix_utc(row[db.file_transfer.modification_date]);
width = row[db.file_transfer.width];
height = row[db.file_transfer.height];
length = (int64) row[db.file_transfer.length];
// TODO put those into the initial query
foreach(var hash_row in db.file_hashes.select().with(db.file_hashes.id, "=", id)) {
Xep.CryptographicHashes.Hash hash = new Xep.CryptographicHashes.Hash();
hash.algo = hash_row[db.file_hashes.algo];
hash.val = hash_row[db.file_hashes.value];
hashes.add(hash);
}
foreach(var thumbnail_row in db.file_thumbnails.select().with(db.file_thumbnails.id, "=", id)) {
Xep.JingleContentThumbnails.Thumbnail thumbnail = new Xep.JingleContentThumbnails.Thumbnail();
thumbnail.data = Xmpp.get_data_for_uri(thumbnail_row[db.file_thumbnails.uri]);
thumbnail.media_type = thumbnail_row[db.file_thumbnails.mime_type];
thumbnail.width = thumbnail_row[db.file_thumbnails.width];
thumbnail.height = thumbnail_row[db.file_thumbnails.height];
thumbnails.add(thumbnail);
}
foreach(Qlite.Row source_row in db.sfs_sources.select().with(db.sfs_sources.file_transfer_id, "=", id)) {
if (source_row[db.sfs_sources.type] == "http") {
sfs_sources.add(new Xep.StatelessFileSharing.HttpSource() { url=source_row[db.sfs_sources.data] });
}
}
notify.connect(on_update);
}
@ -190,79 +121,26 @@ public class FileTransfer : Object {
.value(db.file_transfer.local_time, (long) local_time.to_unix())
.value(db.file_transfer.encryption, encryption)
.value(db.file_transfer.file_name, file_name)
.value(db.file_transfer.size, (long) size)
.value(db.file_transfer.size, size)
.value(db.file_transfer.state, state)
.value(db.file_transfer.provider, provider)
.value(db.file_transfer.info, info);
if (file_sharing_id != null) builder.value(db.file_transfer.file_sharing_id, file_sharing_id);
if (file_name != null) builder.value(db.file_transfer.file_name, file_name);
if (path != null) builder.value(db.file_transfer.path, path);
if (mime_type != null) builder.value(db.file_transfer.mime_type, mime_type);
if (path != null) builder.value(db.file_transfer.path, path);
if (modification_date != null) builder.value(db.file_transfer.modification_date, (long) modification_date.to_unix());
if (width != -1) builder.value(db.file_transfer.width, width);
if (height != -1) builder.value(db.file_transfer.height, height);
if (length != -1) builder.value(db.file_transfer.length, (long) length);
id = (int) builder.perform();
foreach (Xep.CryptographicHashes.Hash hash in hashes) {
db.file_hashes.insert()
.value(db.file_hashes.id, id)
.value(db.file_hashes.algo, hash.algo)
.value(db.file_hashes.value, hash.val)
.perform();
}
foreach (Xep.JingleContentThumbnails.Thumbnail thumbnail in thumbnails) {
string data_uri = "data:image/png;base64," + Base64.encode(thumbnail.data.get_data());
db.file_thumbnails.insert()
.value(db.file_thumbnails.id, id)
.value(db.file_thumbnails.uri, data_uri)
.value(db.file_thumbnails.mime_type, thumbnail.media_type)
.value(db.file_thumbnails.width, thumbnail.width)
.value(db.file_thumbnails.height, thumbnail.height)
.perform();
}
foreach (Xep.StatelessFileSharing.Source source in sfs_sources) {
persist_source(source);
}
notify.connect(on_update);
}
public void add_sfs_source(Xep.StatelessFileSharing.Source source) {
if (sfs_sources.contains(source)) return; // Don't add the same source twice. Might happen due to MAM and lacking deduplication.
sfs_sources.add(source);
if (id != -1) {
persist_source(source);
}
sources_changed();
}
private void persist_source(Xep.StatelessFileSharing.Source source) {
Xep.StatelessFileSharing.HttpSource? http_source = source as Xep.StatelessFileSharing.HttpSource;
if (http_source != null) {
db.sfs_sources.insert()
.value(db.sfs_sources.file_transfer_id, id)
.value(db.sfs_sources.type, "http")
.value(db.sfs_sources.data, http_source.url)
.perform();
}
}
public File? get_file() {
if (path == null) return null;
public File get_file() {
return File.new_for_path(Path.build_filename(Dino.get_storage_dir(), "files", path));
}
private void on_update(Object o, ParamSpec sp) {
Qlite.UpdateBuilder update_builder = db.file_transfer.update().with(db.file_transfer.id, "=", id);
switch (sp.name) {
case "file-sharing-id":
update_builder.set(db.file_transfer.file_sharing_id, file_sharing_id); break;
case "counterpart":
update_builder.set(db.file_transfer.counterpart_id, db.get_jid_id(counterpart));
update_builder.set(db.file_transfer.counterpart_resource, counterpart.resourcepart); break;
@ -283,7 +161,7 @@ public class FileTransfer : Object {
case "mime-type":
update_builder.set(db.file_transfer.mime_type, mime_type); break;
case "size":
update_builder.set(db.file_transfer.size, (long) size); break;
update_builder.set(db.file_transfer.size, size); break;
case "state":
if (state == State.IN_PROGRESS) return;
update_builder.set(db.file_transfer.state, state); break;
@ -291,14 +169,6 @@ public class FileTransfer : Object {
update_builder.set(db.file_transfer.provider, provider); break;
case "info":
update_builder.set(db.file_transfer.info, info); break;
case "modification-date":
update_builder.set(db.file_transfer.modification_date, (long) modification_date.to_unix()); break;
case "width":
update_builder.set(db.file_transfer.width, width); break;
case "height":
update_builder.set(db.file_transfer.height, height); break;
case "length":
update_builder.set(db.file_transfer.length, (long) length); break;
}
update_builder.perform();
}
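A short sketch of the master-side additions shown above: HTTP sources are attached through add_sfs_source() (and persisted once the transfer has a database id), metadata is exposed via the file_metadata property, and get_file() may now return null while no local path exists. The URL and variable names are placeholders:

void sfs_source_sketch(FileTransfer file_transfer) {
    var http = new Xep.StatelessFileSharing.HttpSource() { url = "https://share.example.org/f/abc.png" };
    file_transfer.add_sfs_source(http);           // duplicates (e.g. from MAM) are silently ignored

    var metadata = file_transfer.file_metadata;   // name, mime type, size, hashes, thumbnails, ...

    File? local_file = file_transfer.get_file();  // null while no local path has been set
    if (local_file != null) {
        // the file is on disk and can be opened
    }
}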

View File

@ -67,10 +67,9 @@ public class Message : Object {
}
}
public string? edit_to = null;
public int quoted_item_id { get; private set; default=0; }
public int quoted_item_id = 0;
private Gee.List<Xep.FallbackIndication.Fallback> fallbacks = null;
private Gee.List<Xep.MessageMarkup.Span> markups = null;
private Database? db;
@ -143,101 +142,30 @@ public class Message : Object {
notify.connect(on_update);
}
public void set_quoted_item(int quoted_content_item_id) {
if (id == -1) {
warning("Message needs to be persisted before setting quoted item");
return;
}
this.quoted_item_id = quoted_content_item_id;
db.reply.upsert()
.value(db.reply.message_id, id, true)
.value(db.reply.quoted_content_item_id, quoted_content_item_id)
.value_null(db.reply.quoted_message_stanza_id)
.value_null(db.reply.quoted_message_from)
.perform();
}
public Gee.List<Xep.FallbackIndication.Fallback> get_fallbacks() {
if (fallbacks != null) return fallbacks;
fetch_body_meta();
return fallbacks;
}
public Gee.List<Xep.MessageMarkup.Span> get_markups() {
if (markups != null) return markups;
fetch_body_meta();
return markups;
}
public void persist_markups(Gee.List<Xep.MessageMarkup.Span> markups, int message_id) {
this.markups = markups;
foreach (var span in markups) {
foreach (var ty in span.types) {
db.body_meta.insert()
.value(db.body_meta.info_type, Xep.MessageMarkup.NS_URI)
.value(db.body_meta.message_id, message_id)
.value(db.body_meta.info, Xep.MessageMarkup.span_type_to_str(ty))
.value(db.body_meta.from_char, span.start_char)
.value(db.body_meta.to_char, span.end_char)
.perform();
}
}
}
private void fetch_body_meta() {
var fallbacks_by_ns = new HashMap<string, ArrayList<Xep.FallbackIndication.FallbackLocation>>();
var markups = new ArrayList<Xep.MessageMarkup.Span>();
foreach (Qlite.Row row in db.body_meta.select().with(db.body_meta.message_id, "=", id)) {
switch (row[db.body_meta.info_type]) {
case Xep.FallbackIndication.NS_URI:
string ns_uri = row[db.body_meta.info];
if (!fallbacks_by_ns.has_key(ns_uri)) {
fallbacks_by_ns[ns_uri] = new ArrayList<Xep.FallbackIndication.FallbackLocation>();
}
fallbacks_by_ns[ns_uri].add(new Xep.FallbackIndication.FallbackLocation.partial_body(row[db.body_meta.from_char], row[db.body_meta.to_char]));
break;
case Xep.MessageMarkup.NS_URI:
var types = new ArrayList<Xep.MessageMarkup.SpanType>();
types.add(Xep.MessageMarkup.str_to_span_type(row[db.body_meta.info]));
markups.add(new Xep.MessageMarkup.Span() { types=types, start_char=row[db.body_meta.from_char], end_char=row[db.body_meta.to_char] });
break;
if (row[db.body_meta.info_type] != Xep.FallbackIndication.NS_URI) continue;
string ns_uri = row[db.body_meta.info];
if (!fallbacks_by_ns.has_key(ns_uri)) {
fallbacks_by_ns[ns_uri] = new ArrayList<Xep.FallbackIndication.FallbackLocation>();
}
fallbacks_by_ns[ns_uri].add(new Xep.FallbackIndication.FallbackLocation(row[db.body_meta.from_char], row[db.body_meta.to_char]));
}
var fallbacks = new ArrayList<Xep.FallbackIndication.Fallback>();
foreach (string ns_uri in fallbacks_by_ns.keys) {
fallbacks.add(new Xep.FallbackIndication.Fallback(ns_uri, fallbacks_by_ns[ns_uri]));
fallbacks.add(new Xep.FallbackIndication.Fallback(ns_uri, fallbacks_by_ns[ns_uri].to_array()));
}
this.fallbacks = fallbacks;
this.markups = markups;
return fallbacks;
}
public void set_fallbacks(Gee.List<Xep.FallbackIndication.Fallback> fallbacks) {
if (id == -1) {
warning("Message needs to be persisted before setting fallbacks");
return;
}
this.fallbacks = fallbacks;
foreach (var fallback in fallbacks) {
foreach (var location in fallback.locations) {
db.body_meta.insert()
.value(db.body_meta.message_id, id)
.value(db.body_meta.info_type, Xep.FallbackIndication.NS_URI)
.value(db.body_meta.info, fallback.ns_uri)
.value(db.body_meta.from_char, location.from_char)
.value(db.body_meta.to_char, location.to_char)
.perform();
}
}
}
public void set_type_string(string type) {
@ -274,7 +202,6 @@ public class Message : Object {
}
public static uint hash_func(Message message) {
if (message.body == null) return 0;
return message.body.hash();
}
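Sketch of the master-side reply and markup accessors shown above; it assumes the message row already exists (id != -1) and that quoted_content_item_id refers to the content item being replied to:

void reply_and_markup_sketch(Message message, int quoted_content_item_id) {
    message.set_quoted_item(quoted_content_item_id);   // warns and returns if the message is not persisted yet

    foreach (Xep.MessageMarkup.Span span in message.get_markups()) {
        // each span carries start_char, end_char and one or more span types
    }
}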

View File

@ -79,24 +79,6 @@ public class Settings : Object {
check_spelling_ = value;
}
}
public Encryption get_default_encryption(Account account) {
string? setting = db.account_settings.get_value(account.id, "default-encryption");
if (setting != null) {
return (Encryption) int.parse(setting);
}
return Encryption.OMEMO;
}
public void set_default_encryption(Account account, Encryption encryption) {
db.account_settings.upsert()
.value(db.account_settings.key, "default-encryption", true)
.value(db.account_settings.account_id, account.id, true)
.value(db.account_settings.value, ((int)encryption).to_string())
.perform();
}
}
}
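Usage sketch for the master-only per-account default shown above; settings and account are assumed to exist already, and OMEMO is the fallback when nothing has been stored yet:

void default_encryption_sketch(Settings settings, Account account) {
    Encryption enc = settings.get_default_encryption(account);     // Encryption.OMEMO if unset
    if (enc != Encryption.NONE) {
        settings.set_default_encryption(account, Encryption.NONE); // opt out for this account
    }
}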

View File

@ -55,19 +55,10 @@ public abstract class AccountSettingsEntry : Object {
public abstract Object? get_widget(WidgetType type);
}
public abstract class EncryptionPreferencesEntry : Object {
public abstract string id { get; }
public virtual Priority priority { get { return Priority.DEFAULT; } }
public abstract Object? get_widget(Account account, WidgetType type);
}
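A minimal sketch of a plugin-side implementation of this master-only class; the class name is a placeholder, the usual Dino using directives are assumed, and a real entry would build and return an actual preferences widget:

public class ExampleEncryptionPreferencesEntry : Plugins.EncryptionPreferencesEntry {
    public override string id { get { return "example-encryption"; } }

    public override Object? get_widget(Account account, Plugins.WidgetType type) {
        return null; // a real plugin would return the widget for the requested toolkit here
    }
}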
public interface ContactDetailsProvider : Object {
public abstract string id { get; }
public abstract string tab { get; }
public abstract void populate(Conversation conversation, ContactDetails contact_details, WidgetType type);
public abstract Object? get_widget(Conversation conversation);
}
public class ContactDetails : Object {
@ -102,7 +93,7 @@ public abstract interface ConversationAdditionPopulator : ConversationItemPopula
public abstract interface VideoCallPlugin : Object {
public abstract bool supported();
public abstract bool supports(string? media);
// Video widget
public abstract VideoCallWidget? create_widget(WidgetType type);

View File

@ -3,15 +3,14 @@ using Gee;
namespace Dino.Plugins {
public class Registry {
public HashMap<Entities.Encryption, EncryptionListEntry> encryption_list_entries = new HashMap<Entities.Encryption, EncryptionListEntry>();
public HashMap<string, CallEncryptionEntry> call_encryption_entries = new HashMap<string, CallEncryptionEntry>();
public ArrayList<AccountSettingsEntry> account_settings_entries = new ArrayList<AccountSettingsEntry>();
public ArrayList<EncryptionPreferencesEntry> encryption_preferences_entries = new ArrayList<EncryptionPreferencesEntry>();
public ArrayList<ContactDetailsProvider> contact_details_entries = new ArrayList<ContactDetailsProvider>();
public Map<string, TextCommand> text_commands = new HashMap<string, TextCommand>();
public Gee.List<ConversationAdditionPopulator> conversation_addition_populators = new ArrayList<ConversationAdditionPopulator>();
public Gee.List<NotificationPopulator> notification_populators = new ArrayList<NotificationPopulator>();
public Gee.Collection<ConversationTitlebarEntry> conversation_titlebar_entries = new Gee.TreeSet<ConversationTitlebarEntry>((a, b) => {
internal HashMap<Entities.Encryption, EncryptionListEntry> encryption_list_entries = new HashMap<Entities.Encryption, EncryptionListEntry>();
internal HashMap<string, CallEncryptionEntry> call_encryption_entries = new HashMap<string, CallEncryptionEntry>();
internal ArrayList<AccountSettingsEntry> account_settings_entries = new ArrayList<AccountSettingsEntry>();
internal ArrayList<ContactDetailsProvider> contact_details_entries = new ArrayList<ContactDetailsProvider>();
internal Map<string, TextCommand> text_commands = new HashMap<string, TextCommand>();
internal Gee.List<ConversationAdditionPopulator> conversation_addition_populators = new ArrayList<ConversationAdditionPopulator>();
internal Gee.List<NotificationPopulator> notification_populators = new ArrayList<NotificationPopulator>();
internal Gee.Collection<ConversationTitlebarEntry> conversation_titlebar_entries = new Gee.TreeSet<ConversationTitlebarEntry>((a, b) => {
return (int)(a.order - b.order);
});
public VideoCallPlugin? video_call_plugin;
@ -44,18 +43,6 @@ public class Registry {
}
}
public bool register_encryption_preferences_entry(EncryptionPreferencesEntry entry) {
lock(encryption_preferences_entries) {
foreach(var e in encryption_preferences_entries) {
if (e.id == entry.id) return false;
}
encryption_preferences_entries.add(entry);
// TODO: Order by priority
// encryption_preferences_entries.sort((a,b) => b.name.collate(a.name));
return true;
}
}
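Registering such an entry is a single call on the registry; the sketch below reuses the placeholder class from the interfaces sketch and assumes the caller already has a reference to the application's Plugins.Registry:

void register_example_entry(Plugins.Registry registry) {
    bool added = registry.register_encryption_preferences_entry(new ExampleEncryptionPreferencesEntry());
    if (!added) {
        // an entry with the same id was already registered
    }
}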
public bool register_contact_details_entry(ContactDetailsProvider entry) {
lock(contact_details_entries) {
foreach(ContactDetailsProvider e in contact_details_entries) {

View File

@ -12,7 +12,6 @@ public class AvatarManager : StreamInteractionModule, Object {
public string id { get { return IDENTITY.id; } }
public signal void received_avatar(Jid jid, Account account);
public signal void fetched_avatar(Jid jid, Account account);
private enum Source {
USER_AVATARS,
@ -24,7 +23,8 @@ public class AvatarManager : StreamInteractionModule, Object {
private string folder = null;
private HashMap<Jid, string> user_avatars = new HashMap<Jid, string>(Jid.hash_func, Jid.equals_func);
private HashMap<Jid, string> vcard_avatars = new HashMap<Jid, string>(Jid.hash_func, Jid.equals_func);
private HashSet<string> pending_fetch = new HashSet<string>();
private HashMap<string, Pixbuf> cached_pixbuf = new HashMap<string, Pixbuf>();
private HashMap<string, Gee.List<SourceFuncWrapper>> pending_pixbuf = new HashMap<string, Gee.List<SourceFuncWrapper>>();
private const int MAX_PIXEL = 192;
public static void start(StreamInteractor stream_interactor, Database db) {
@ -35,39 +35,8 @@ public class AvatarManager : StreamInteractionModule, Object {
private AvatarManager(StreamInteractor stream_interactor, Database db) {
this.stream_interactor = stream_interactor;
this.db = db;
File old_avatars = File.new_build_filename(Dino.get_storage_dir(), "avatars");
File new_avatars = File.new_build_filename(Dino.get_cache_dir(), "avatars");
this.folder = new_avatars.get_path();
// Move old avatar location to new one
if (old_avatars.query_exists()) {
if (!new_avatars.query_exists()) {
// Move old avatars folder (~/.local/share/dino) to new location (~/.cache/dino)
try {
new_avatars.get_parent().make_directory_with_parents();
} catch (Error e) { }
try {
old_avatars.move(new_avatars, FileCopyFlags.NONE);
debug("Avatars directory %s moved to %s", old_avatars.get_path(), new_avatars.get_path());
} catch (Error e) { }
} else {
// If both old and new folders exist, remove the old one
try {
FileEnumerator enumerator = old_avatars.enumerate_children("standard::*", FileQueryInfoFlags.NOFOLLOW_SYMLINKS);
FileInfo info = null;
while ((info = enumerator.next_file()) != null) {
FileUtils.remove(old_avatars.get_path() + "/" + info.get_name());
}
DirUtils.remove(old_avatars.get_path());
} catch (Error e) { }
}
}
// Create avatar folder
try {
new_avatars.make_directory_with_parents();
} catch (Error e) { }
this.folder = Path.build_filename(Dino.get_storage_dir(), "avatars");
DirUtils.create_with_parents(this.folder, 0700);
stream_interactor.account_added.connect(on_account_added);
stream_interactor.module_manager.initialize_account_modules.connect((_, modules) => {
@ -76,18 +45,6 @@ public class AvatarManager : StreamInteractionModule, Object {
});
}
public File? get_avatar_file(Account account, Jid jid_) {
string? hash = get_avatar_hash(account, jid_);
if (hash == null) return null;
File file = File.new_for_path(Path.build_filename(folder, hash));
if (!file.query_exists()) {
fetch_and_store_for_jid.begin(account, jid_);
return null;
} else {
return file;
}
}
private string? get_avatar_hash(Account account, Jid jid_) {
Jid jid = jid_;
if (!stream_interactor.get_module(MucManager.IDENTITY).is_groupchat_occupant(jid_, account)) {
@ -102,10 +59,79 @@ public class AvatarManager : StreamInteractionModule, Object {
}
}
public bool has_avatar_cached(Account account, Jid jid) {
string? hash = get_avatar_hash(account, jid);
return hash != null && cached_pixbuf.has_key(hash);
}
public bool has_avatar(Account account, Jid jid) {
return get_avatar_hash(account, jid) != null;
}
public Pixbuf? get_cached_avatar(Account account, Jid jid_) {
string? hash = get_avatar_hash(account, jid_);
if (hash == null) return null;
if (cached_pixbuf.has_key(hash)) return cached_pixbuf[hash];
return null;
}
public async Pixbuf? get_avatar(Account account, Jid jid_) {
Jid jid = jid_;
if (!stream_interactor.get_module(MucManager.IDENTITY).is_groupchat_occupant(jid_, account)) {
jid = jid_.bare_jid;
}
int source = -1;
string? hash = null;
if (user_avatars.has_key(jid)) {
hash = user_avatars[jid];
source = 1;
} else if (vcard_avatars.has_key(jid)) {
hash = vcard_avatars[jid];
source = 2;
}
if (hash == null) return null;
if (cached_pixbuf.has_key(hash)) {
return cached_pixbuf[hash];
}
XmppStream? stream = stream_interactor.get_stream(account);
if (stream == null || !stream.negotiation_complete) return null;
if (pending_pixbuf.has_key(hash)) {
pending_pixbuf[hash].add(new SourceFuncWrapper(get_avatar.callback));
yield;
return cached_pixbuf[hash];
}
pending_pixbuf[hash] = new ArrayList<SourceFuncWrapper>();
Pixbuf? image = yield get_image(hash);
if (image != null) {
cached_pixbuf[hash] = image;
} else {
Bytes? bytes = null;
if (source == 1) {
bytes = yield Xmpp.Xep.UserAvatars.fetch_image(stream, jid, hash);
} else if (source == 2) {
bytes = yield Xmpp.Xep.VCard.fetch_image(stream, jid, hash);
if (bytes == null && jid.is_bare()) {
db.avatar.delete().with(db.avatar.jid_id, "=", db.get_jid_id(jid)).perform();
}
}
if (bytes != null) {
store_image(hash, bytes);
image = yield get_image(hash);
}
cached_pixbuf[hash] = image;
}
foreach (SourceFuncWrapper sfw in pending_pixbuf[hash]) {
sfw.sfun();
}
return image;
}
public void publish(Account account, string file) {
try {
Pixbuf pixbuf = new Pixbuf.from_file(file);
@ -127,32 +153,30 @@ public class AvatarManager : StreamInteractionModule, Object {
}
}
public void unset_avatar(Account account) {
XmppStream stream = stream_interactor.get_stream(account);
if (stream == null) return;
Xmpp.Xep.UserAvatars.unset_avatar(stream);
}
private void on_account_added(Account account) {
stream_interactor.module_manager.get_module(account, Xep.UserAvatars.Module.IDENTITY).received_avatar_hash.connect((stream, jid, id) =>
on_user_avatar_received(account, jid, id)
on_user_avatar_received.begin(account, jid, id)
);
stream_interactor.module_manager.get_module(account, Xep.UserAvatars.Module.IDENTITY).avatar_removed.connect((stream, jid) => {
on_user_avatar_removed(account, jid);
});
stream_interactor.module_manager.get_module(account, Xep.VCard.Module.IDENTITY).received_avatar_hash.connect((stream, jid, id) =>
on_vcard_avatar_received(account, jid, id)
on_vcard_avatar_received.begin(account, jid, id)
);
foreach (var entry in get_avatar_hashes(account, Source.USER_AVATARS).entries) {
on_user_avatar_received(account, entry.key, entry.value);
user_avatars[entry.key] = entry.value;
}
foreach (var entry in get_avatar_hashes(account, Source.VCARD).entries) {
on_vcard_avatar_received(account, entry.key, entry.value);
// FIXME: remove. temporary to remove falsely saved avatars.
if (stream_interactor.get_module(MucManager.IDENTITY).is_groupchat(entry.key, account)) {
db.avatar.delete().with(db.avatar.jid_id, "=", db.get_jid_id(entry.key)).perform();
continue;
}
vcard_avatars[entry.key] = entry.value;
}
}
private void on_user_avatar_received(Account account, Jid jid_, string id) {
private async void on_user_avatar_received(Account account, Jid jid_, string id) {
Jid jid = jid_.bare_jid;
if (!user_avatars.has_key(jid) || user_avatars[jid] != id) {
@ -162,14 +186,7 @@ public class AvatarManager : StreamInteractionModule, Object {
received_avatar(jid, account);
}
private void on_user_avatar_removed(Account account, Jid jid_) {
Jid jid = jid_.bare_jid;
user_avatars.unset(jid);
remove_avatar_hash(account, jid, Source.USER_AVATARS);
received_avatar(jid, account);
}
private void on_vcard_avatar_received(Account account, Jid jid_, string id) {
private async void on_vcard_avatar_received(Account account, Jid jid_, string id) {
bool is_gc = stream_interactor.get_module(MucManager.IDENTITY).might_be_groupchat(jid_.bare_jid, account);
Jid jid = is_gc ? jid_ : jid_.bare_jid;
@ -191,14 +208,6 @@ public class AvatarManager : StreamInteractionModule, Object {
.perform();
}
public void remove_avatar_hash(Account account, Jid jid, int type) {
db.avatar.delete()
.with(db.avatar.jid_id, "=", db.get_jid_id(jid))
.with(db.avatar.account_id, "=", account.id)
.with(db.avatar.type_, "=", type)
.perform();
}
public HashMap<Jid, string> get_avatar_hashes(Account account, int type) {
HashMap<Jid, string> ret = new HashMap<Jid, string>(Jid.hash_func, Jid.equals_func);
foreach (Row row in db.avatar.select({db.avatar.jid_id, db.avatar.hash})
@ -209,53 +218,12 @@ public class AvatarManager : StreamInteractionModule, Object {
return ret;
}
public async bool fetch_and_store_for_jid(Account account, Jid jid) {
int source = -1;
string? hash = null;
if (user_avatars.has_key(jid)) {
hash = user_avatars[jid];
source = 1;
} else if (vcard_avatars.has_key(jid)) {
hash = vcard_avatars[jid];
source = 2;
} else {
return false;
}
XmppStream? stream = stream_interactor.get_stream(account);
if (stream == null || !stream.negotiation_complete) return false;
return yield fetch_and_store(stream, account, jid, source, hash);
}
private async bool fetch_and_store(XmppStream stream, Account account, Jid jid, int source, string? hash) {
if (hash == null || pending_fetch.contains(hash)) return false;
pending_fetch.add(hash);
Bytes? bytes = null;
if (source == 1) {
bytes = yield Xmpp.Xep.UserAvatars.fetch_image(stream, jid, hash);
} else if (source == 2) {
bytes = yield Xmpp.Xep.VCard.fetch_image(stream, jid, hash);
if (bytes == null && jid.is_bare()) {
db.avatar.delete().with(db.avatar.jid_id, "=", db.get_jid_id(jid)).perform();
}
}
if (bytes != null) {
yield store_image(hash, bytes);
fetched_avatar(jid, account);
}
pending_fetch.remove(hash);
return bytes != null;
}
private async void store_image(string id, Bytes data) {
public void store_image(string id, Bytes data) {
File file = File.new_for_path(Path.build_filename(folder, id));
try {
if (file.query_exists()) file.delete(); //TODO y?
DataOutputStream fos = new DataOutputStream(file.create(FileCreateFlags.REPLACE_DESTINATION));
yield fos.write_bytes_async(data);
fos.write_bytes_async.begin(data);
} catch (Error e) {
// Ignore: we failed in storing, so we refuse to display later...
}
@ -265,6 +233,29 @@ public class AvatarManager : StreamInteractionModule, Object {
File file = File.new_for_path(Path.build_filename(folder, id));
return file.query_exists();
}
public async Pixbuf? get_image(string id) {
try {
File file = File.new_for_path(Path.build_filename(folder, id));
FileInputStream stream = yield file.read_async(Priority.LOW);
uint8 fbuf[1024];
size_t size;
Checksum checksum = new Checksum (ChecksumType.SHA1);
while ((size = yield stream.read_async(fbuf, Priority.LOW)) > 0) {
checksum.update(fbuf, size);
}
if (checksum.get_string() != id) {
FileUtils.remove(file.get_path());
}
stream.seek(0, SeekType.SET);
return yield new Pixbuf.from_stream_async(stream, null);
} catch (Error e) {
return null;
}
}
}
}
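
The avatar cache above is content-addressed: each file is stored under the SHA-1 hash of its contents, which is why get_image() re-hashes the file on read and deletes it when the digest no longer matches the requested id. The same integrity check as a minimal synchronous helper (the function name is hypothetical; the folder/filename convention is taken from the code above):

public static bool cached_avatar_is_valid(string folder, string id) {
    File file = File.new_for_path(Path.build_filename(folder, id));
    try {
        FileInputStream stream = file.read();
        uint8 buf[1024];
        ssize_t size;
        var checksum = new Checksum(ChecksumType.SHA1);
        // Hash the whole file and compare the hex digest against the expected id.
        while ((size = stream.read(buf)) > 0) {
            checksum.update(buf, (size_t) size);
        }
        return checksum.get_string() == id;
    } catch (Error e) {
        // Unreadable files count as invalid so callers fall back to fetching again.
        return false;
    }
}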

View File

@ -9,7 +9,7 @@ public class Dino.PeerState : Object {
public signal void connection_ready();
public signal void session_terminated(bool we_terminated, string? reason_name, string? reason_text);
public signal void encryption_updated(Xep.Jingle.ContentEncryption? audio_encryption, Xep.Jingle.ContentEncryption? video_encryption);
public signal void encryption_updated(Xep.Jingle.ContentEncryption? audio_encryption, Xep.Jingle.ContentEncryption? video_encryption, bool same);
public StreamInteractor stream_interactor;
public CallState call_state;
@ -412,7 +412,7 @@ public class Dino.PeerState : Object {
if ((audio_encryptions != null && audio_encryptions.is_empty) || (video_encryptions != null && video_encryptions.is_empty)) {
call.encryption = Encryption.NONE;
encryption_updated(null, null);
encryption_updated(null, null, true);
return;
}
@ -462,7 +462,7 @@ public class Dino.PeerState : Object {
encryption_keys_same = true;
}
encryption_updated(audio_encryption, video_encryption);
encryption_updated(audio_encryption, video_encryption, encryption_keys_same);
}
}
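
encryption_updated now carries a third bool same argument; the diff passes encryption_keys_same, so the flag presumably tells listeners whether the audio and video streams ended up with identical key material (letting a UI show a single fingerprint). A hypothetical consumer holding a PeerState would connect like this:

peer_state.encryption_updated.connect((audio_encryption, video_encryption, same) => {
    // Both encryptions null is the "unencrypted call" case emitted above.
    if (audio_encryption == null && video_encryption == null) {
        warning("Call media is not encrypted");
        return;
    }
    debug("Encryption updated, audio/video keys identical: %s", same.to_string());
});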

View File

@ -18,7 +18,6 @@ public class Dino.CallState : Object {
public bool use_cim = false;
public string? cim_call_id = null;
public Jid? cim_counterpart = null;
public ArrayList<Jid> cim_jids_to_inform = new ArrayList<Jid>();
public string cim_message_type { get; set; default=Xmpp.MessageStanza.TYPE_CHAT; }
public Xep.Muji.GroupCall? group_call { get; set; }
@ -50,7 +49,7 @@ public class Dino.CallState : Object {
}
internal async void initiate_groupchat_call(Jid muc) {
cim_jids_to_inform.add(muc);
parent_muc = muc;
cim_message_type = MessageStanza.TYPE_GROUPCHAT;
if (this.group_call == null) yield convert_into_group_call();
@ -98,27 +97,29 @@ public class Dino.CallState : Object {
accepted = true;
call.state = Call.State.ESTABLISHING;
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
if (use_cim) {
if (invited_to_group_call != null) {
join_group_call.begin(invited_to_group_call);
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_muji_accept(stream, jid_to_inform, cim_call_id, invited_to_group_call, cim_message_type);
}
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
StanzaNode? inner_node = null;
if (group_call != null) {
inner_node = new StanzaNode.build("muji", Xep.Muji.NS_URI).add_self_xmlns()
.put_attribute("room", group_call.muc_jid.to_string());
} else if (peers.size == 1) {
string sid = peers.values.to_array()[0].sid;
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_jingle_accept(stream, jid_to_inform, cim_call_id, sid, cim_message_type);
foreach (PeerState peer in peers.values) {
inner_node = new StanzaNode.build("jingle", Xep.CallInvites.NS_URI)
.put_attribute("sid", peer.sid);
}
}
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_accept(stream, cim_counterpart, cim_call_id, inner_node, cim_message_type);
} else {
foreach (PeerState peer in peers.values) {
peer.accept();
}
}
if (invited_to_group_call != null) {
join_group_call.begin(invited_to_group_call);
}
}
public void reject() {
@ -127,10 +128,7 @@ public class Dino.CallState : Object {
if (use_cim) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_reject(stream, jid_to_inform, cim_call_id, cim_message_type);
}
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_reject(stream, cim_counterpart, cim_call_id, cim_message_type);
}
var peers_cpy = new ArrayList<PeerState>();
peers_cpy.add_all(peers.values);
@ -144,38 +142,32 @@ public class Dino.CallState : Object {
var peers_cpy = new ArrayList<PeerState>();
peers_cpy.add_all(peers.values);
// Terminate sessions, send out messages about the ended call, exit MUC if applicable
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream != null) {
if (group_call != null) {
if (group_call != null) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream != null) {
stream.get_module(Xep.Muc.Module.IDENTITY).exit(stream, group_call.muc_jid);
}
if (call.state == Call.State.IN_PROGRESS || call.state == Call.State.ESTABLISHING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.SUCCESS, reason_text);
}
if (use_cim) {
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_left(stream, jid_to_inform, cim_call_id, cim_message_type);
}
}
} else if (call.state == Call.State.RINGING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.CANCEL, reason_text);
}
if (call.direction == Call.DIRECTION_OUTGOING && use_cim) {
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_retract(stream, jid_to_inform, cim_call_id, cim_message_type);
}
}
}
}
// Update the call state
if (call.state == Call.State.IN_PROGRESS || call.state == Call.State.ESTABLISHING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.SUCCESS, reason_text);
}
if (use_cim) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_finish(stream, cim_counterpart, cim_call_id, cim_message_type);
}
call.state = Call.State.ENDED;
} else if (call.state == Call.State.RINGING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.CANCEL, reason_text);
}
if (call.direction == Call.DIRECTION_OUTGOING && use_cim) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_retract(stream, cim_counterpart, cim_call_id, cim_message_type);
}
call.state = Call.State.MISSED;
} else {
return;

View File

@ -61,6 +61,8 @@ namespace Dino {
call_state.initiate_groupchat_call.begin(conversation.counterpart);
}
conversation.last_active = call.time;
call_outgoing(call, call_state, conversation);
return call_state;
@ -70,14 +72,14 @@ namespace Dino {
Plugins.VideoCallPlugin? plugin = Application.get_default().plugin_registry.video_call_plugin;
if (plugin == null) return false;
return plugin.supported();
return plugin.supports(null);
}
public async bool can_conversation_do_calls(Conversation conversation) {
if (!can_we_do_calls(conversation.account)) return false;
if (conversation.type_ == Conversation.Type.CHAT) {
return !conversation.counterpart.equals_bare(conversation.account.bare_jid);
return (yield get_call_resources(conversation.account, conversation.counterpart)).size > 0 || has_jmi_resources(conversation.counterpart);
} else {
bool is_private = stream_interactor.get_module(MucManager.IDENTITY).is_private_room(conversation.account, conversation.counterpart);
return is_private && can_initiate_groupcall(conversation.account);
@ -219,6 +221,7 @@ namespace Dino {
Conversation conversation = stream_interactor.get_module(ConversationManager.IDENTITY).create_conversation(call.counterpart.bare_jid, account, Conversation.Type.CHAT);
stream_interactor.get_module(CallStore.IDENTITY).add_call(call, conversation);
conversation.last_active = call.time;
var call_state = new CallState(call, stream_interactor);
connect_call_state_signals(call_state);
@ -291,12 +294,12 @@ namespace Dino {
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(inviter_jid.bare_jid, account);
if (conversation == null) return null;
stream_interactor.get_module(CallStore.IDENTITY).add_call(call, conversation);
conversation.last_active = call.time;
CallState call_state = new CallState(call, stream_interactor);
connect_call_state_signals(call_state);
call_state.invited_to_group_call = muc_jid;
call_state.use_cim = true;
call_state.cim_jids_to_inform.add(inviter_jid.bare_jid);
call_state.parent_muc = inviter_jid.bare_jid;
debug("[%s] on_muji_call_received accepting", account.bare_jid.to_string());
@ -457,11 +460,12 @@ namespace Dino {
call_state.use_cim = true;
call_state.cim_call_id = call_id;
call_state.cim_jids_to_inform.add(message_stanza.type_ == MessageStanza.TYPE_GROUPCHAT ? from_jid.bare_jid : from_jid);
call_state.cim_counterpart = message_stanza.type_ == MessageStanza.TYPE_GROUPCHAT ? from_jid.bare_jid : from_jid;
call_state.cim_message_type = message_stanza.type_;
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).approx_conversation_for_stanza(from_jid, to_jid, account, message_stanza.type_);
if (conversation == null) return;
conversation.last_active = call_state.call.time;
if (call_state.call.direction == Call.DIRECTION_INCOMING) {
call_incoming(call_state.call, call_state, conversation, video_requested, multiparty);

View File

@ -114,7 +114,7 @@ public class ConnectionManager : Object {
Timeout.add_seconds(60, () => {
foreach (Account account in connections.keys) {
if (connections[account].last_activity == null ||
if (connections[account].last_activity != null &&
connections[account].last_activity.compare(new DateTime.now_utc().add_minutes(-1)) < 0) {
check_reconnect(account);
}
@ -179,12 +179,13 @@ public class ConnectionManager : Object {
}
}
private async void connect_stream(Account account) {
private async void connect_stream(Account account, string? resource = null) {
if (!connections.has_key(account)) return;
debug("[%s] (Maybe) Establishing a new connection", account.bare_jid.to_string());
connection_errors.unset(account);
if (resource == null) resource = account.resourcepart;
XmppStreamResult stream_result;
@ -200,7 +201,7 @@ public class ConnectionManager : Object {
connection_directly_retry[account] = false;
change_connection_state(account, ConnectionState.CONNECTING);
stream_result = yield Xmpp.establish_stream(account.bare_jid, module_manager.get_modules(account), log_options,
stream_result = yield Xmpp.establish_stream(account.bare_jid, module_manager.get_modules(account, resource), log_options,
(peer_cert, errors) => { return on_invalid_certificate(account.domainpart, peer_cert, errors); }
);
connections[account].stream = stream_result.stream;
@ -225,7 +226,7 @@ public class ConnectionManager : Object {
XmppStream stream = stream_result.stream;
debug("[%s] New connection: %p", account.full_jid.to_string(), stream);
debug("[%s] New connection with resource %s: %p", account.bare_jid.to_string(), resource, stream);
connections[account].established = new DateTime.now_utc();
stream.attached_modules.connect((stream) => {
@ -254,7 +255,6 @@ public class ConnectionManager : Object {
debug("[%s %p] Connection error: %s", account.bare_jid.to_string(), stream, e.message);
change_connection_state(account, ConnectionState.DISCONNECTED);
if (!connections.has_key(account)) return;
connections[account].reset();
StreamError.Flag? flag = stream.get_flag(StreamError.Flag.IDENTITY);
@ -263,8 +263,7 @@ public class ConnectionManager : Object {
set_connection_error(account, new ConnectionError(ConnectionError.Source.STREAM_ERROR, flag.error_type));
if (flag.resource_rejected) {
account.set_random_resource();
connect_stream.begin(account);
connect_stream.begin(account, account.resourcepart + "-" + random_uuid());
return;
}
}
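
On a rejected resource the new code keeps account.resourcepart intact and retries the connection with a random suffix; random_uuid() is used here but not defined in this hunk. A plausible stand-in using GLib (an assumption, not necessarily Dino's implementation):

// GLib >= 2.52 can generate an RFC 4122 version-4 UUID string directly.
public static string random_uuid() {
    return Uuid.string_random();
}

// Giving e.g. "laptop-1b4e28ba-..." as the resource for the retry:
// connect_stream.begin(account, account.resourcepart + "-" + random_uuid());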

View File

@ -1,58 +0,0 @@
using Xmpp;
using Gee;
using Qlite;
using Dino.Entities;
public class Dino.Model.ConversationDisplayName : Object {
public string display_name { get; set; }
}
namespace Dino {
public class ContactModels : StreamInteractionModule, Object {
public static ModuleIdentity<ContactModels> IDENTITY = new ModuleIdentity<ContactModels>("contact_models");
public string id { get { return IDENTITY.id; } }
private StreamInteractor stream_interactor;
private HashMap<Conversation, Model.ConversationDisplayName> conversation_models = new HashMap<Conversation, Model.ConversationDisplayName>(Conversation.hash_func, Conversation.equals_func);
public static void start(StreamInteractor stream_interactor) {
ContactModels m = new ContactModels(stream_interactor);
stream_interactor.add_module(m);
}
private ContactModels(StreamInteractor stream_interactor) {
this.stream_interactor = stream_interactor;
stream_interactor.get_module(MucManager.IDENTITY).room_info_updated.connect((account, jid) => {
check_update_models(account, jid, Conversation.Type.GROUPCHAT);
});
stream_interactor.get_module(MucManager.IDENTITY).private_room_occupant_updated.connect((account, room, occupant) => {
check_update_models(account, room, Conversation.Type.GROUPCHAT);
});
stream_interactor.get_module(MucManager.IDENTITY).subject_set.connect((account, jid, subject) => {
check_update_models(account, jid, Conversation.Type.GROUPCHAT);
});
stream_interactor.get_module(RosterManager.IDENTITY).updated_roster_item.connect((account, jid, roster_item) => {
check_update_models(account, jid, Conversation.Type.CHAT);
});
}
private void check_update_models(Account account, Jid jid, Conversation.Type conversation_ty) {
var conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(jid, account, conversation_ty);
if (conversation == null) return;
var display_name_model = conversation_models[conversation];
if (display_name_model == null) return;
display_name_model.display_name = Dino.get_conversation_display_name(stream_interactor, conversation, "%s (%s)");
}
public Model.ConversationDisplayName get_display_name_model(Conversation conversation) {
if (conversation_models.has_key(conversation)) return conversation_models[conversation];
var model = new Model.ConversationDisplayName();
model.display_name = Dino.get_conversation_display_name(stream_interactor, conversation, "%s (%s)");
conversation_models[conversation] = model;
return model;
}
}
}
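
ContactModels keeps one ConversationDisplayName object per conversation and rewrites its display_name whenever roster entries, MUC room info, occupants or subjects change. Because the model is a plain GObject, a view can stay current through property binding instead of recomputing names itself; a hypothetical consumer with a Gtk.Label might look like this:

var model = stream_interactor.get_module(ContactModels.IDENTITY).get_display_name_model(conversation);
// "display-name" is the GObject name of the display_name property defined above.
model.bind_property("display-name", label, "label", BindingFlags.SYNC_CREATE);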

View File

@ -121,7 +121,13 @@ public class ContentItemStore : StreamInteractionModule, Object {
Message? message = get_message_for_content_item(conversation, content_item);
if (message == null) return null;
return MessageStorage.get_reference_id(message);
if (message.edit_to != null) return message.edit_to;
if (conversation.type_ == Conversation.Type.CHAT) {
return message.stanza_id;
} else {
return message.server_id;
}
}
public Jid? get_message_sender_for_content_item(Conversation conversation, ContentItem content_item) {

View File

@ -29,8 +29,6 @@ public class ConversationManager : StreamInteractionModule, Object {
stream_interactor.account_removed.connect(on_account_removed);
stream_interactor.get_module(MessageProcessor.IDENTITY).received_pipeline.connect(new MessageListener(stream_interactor));
stream_interactor.get_module(MessageProcessor.IDENTITY).message_sent.connect(handle_sent_message);
stream_interactor.get_module(Calls.IDENTITY).call_incoming.connect(handle_new_call);
stream_interactor.get_module(Calls.IDENTITY).call_outgoing.connect(handle_new_call);
}
public Conversation create_conversation(Jid jid, Account account, Conversation.Type? type = null) {
@ -48,28 +46,18 @@ public class ConversationManager : StreamInteractionModule, Object {
// Create a new conversation
Conversation conversation = new Conversation(jid, account, type);
// Set encryption for conversation
if (type == Conversation.Type.CHAT ||
(type == Conversation.Type.GROUPCHAT && stream_interactor.get_module(MucManager.IDENTITY).is_private_room(account, jid))) {
conversation.encryption = Application.get_default().settings.get_default_encryption(account);
} else {
conversation.encryption = Encryption.NONE;
}
add_conversation(conversation);
conversation.persist(db);
return conversation;
}
public Conversation? get_conversation_for_message(Entities.Message message) {
if (conversations.has_key(message.account)) {
if (message.type_ == Entities.Message.Type.CHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.CHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.GROUPCHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT_PM) {
return create_conversation(message.counterpart, message.account, Conversation.Type.GROUPCHAT_PM);
}
if (message.type_ == Entities.Message.Type.CHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.CHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.GROUPCHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT_PM) {
return create_conversation(message.counterpart, message.account, Conversation.Type.GROUPCHAT_PM);
}
return null;
}
@ -206,11 +194,6 @@ public class ConversationManager : StreamInteractionModule, Object {
}
}
private void handle_new_call(Call call, CallState state, Conversation conversation) {
conversation.last_active = call.time;
start_conversation(conversation);
}
private void add_conversation(Conversation conversation) {
if (!conversations[conversation.account].has_key(conversation.counterpart)) {
conversations[conversation.account][conversation.counterpart] = new ArrayList<Conversation>(Conversation.equals_func);

View File

@ -7,7 +7,7 @@ using Dino.Entities;
namespace Dino {
public class Database : Qlite.Database {
private const int VERSION = 29;
private const int VERSION = 25;
public class AccountTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
@ -17,7 +17,6 @@ public class Database : Qlite.Database {
public Column<string> alias = new Column.Text("alias");
public Column<bool> enabled = new Column.BoolInt("enabled");
public Column<string> roster_version = new Column.Text("roster_version") { min_version=2 };
// no longer used. all usages already removed. remove db column at some point.
public Column<long> mam_earliest_synced = new Column.Long("mam_earliest_synced") { min_version=4 };
internal AccountTable(Database db) {
@ -94,11 +93,6 @@ public class Database : Qlite.Database {
// deduplication
index("message_account_counterpart_stanzaid_idx", {account_id, counterpart_id, stanza_id});
index("message_account_counterpart_serverid_idx", {account_id, counterpart_id, server_id});
// message by marked
index("message_account_marked_idx", {account_id, marked});
fts({body});
}
}
@ -180,7 +174,6 @@ public class Database : Qlite.Database {
public class FileTransferTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<string> file_sharing_id = new Column.Text("file_sharing_id") { min_version=28 };
public Column<int> account_id = new Column.Integer("account_id") { not_null = true };
public Column<int> counterpart_id = new Column.Integer("counterpart_id") { not_null = true };
public Column<string> counterpart_resource = new Column.Text("counterpart_resource");
@ -192,58 +185,15 @@ public class Database : Qlite.Database {
public Column<string> file_name = new Column.Text("file_name");
public Column<string> path = new Column.Text("path");
public Column<string> mime_type = new Column.Text("mime_type");
public Column<long> size = new Column.Long("size");
public Column<int> size = new Column.Integer("size");
public Column<int> state = new Column.Integer("state");
public Column<int> provider = new Column.Integer("provider");
public Column<string> info = new Column.Text("info");
public Column<long> modification_date = new Column.Long("modification_date") { default = "-1", min_version=28 };
public Column<int> width = new Column.Integer("width") { default = "-1", min_version=28 };
public Column<int> height = new Column.Integer("height") { default = "-1", min_version=28 };
public Column<long> length = new Column.Integer("length") { default = "-1", min_version=28 };
internal FileTransferTable(Database db) {
base(db, "file_transfer");
init({id, file_sharing_id, account_id, counterpart_id, counterpart_resource, our_resource, direction,
time, local_time, encryption, file_name, path, mime_type, size, state, provider, info, modification_date,
width, height, length});
}
}
public class FileHashesTable : Table {
public Column<int> id = new Column.Integer("id");
public Column<string> algo = new Column.Text("algo") { not_null = true };
public Column<string> value = new Column.Text("value") { not_null = true };
internal FileHashesTable(Database db) {
base(db, "file_hashes");
init({id, algo, value});
unique({id, algo}, "REPLACE");
}
}
public class FileThumbnailsTable : Table {
public Column<int> id = new Column.Integer("id");
// TODO store data as bytes, not as data uri
public Column<string> uri = new Column.Text("uri") { not_null = true };
public Column<string> mime_type = new Column.Text("mime_type");
public Column<int> width = new Column.Integer("width");
public Column<int> height = new Column.Integer("height");
internal FileThumbnailsTable(Database db) {
base(db, "file_thumbnails");
init({id, uri, mime_type, width, height});
}
}
public class SourcesTable : Table {
public Column<int> file_transfer_id = new Column.Integer("file_transfer_id");
public Column<string> type = new Column.Text("type") { not_null = true };
public Column<string> data = new Column.Text("data") { not_null = true };
internal SourcesTable(Database db) {
base(db, "sfs_sources");
init({file_transfer_id, type, data});
index("sfs_sources_file_transfer_id_idx", {file_transfer_id});
init({id, account_id, counterpart_id, counterpart_resource, our_resource, direction, time, local_time,
encryption, file_name, path, mime_type, size, state, provider, info});
}
}
@ -346,11 +296,10 @@ public class Database : Qlite.Database {
public Column<string> jid = new Column.Text("jid");
public Column<string> handle = new Column.Text("name");
public Column<string> subscription = new Column.Text("subscription");
public Column<string> ask = new Column.Text("ask") { min_version=29 };
internal RosterTable(Database db) {
base(db, "roster");
init({account_id, jid, handle, subscription, ask});
init({account_id, jid, handle, subscription});
unique({account_id, jid}, "IGNORE");
}
}
@ -399,29 +348,6 @@ public class Database : Qlite.Database {
}
}
public class AccountSettingsTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> account_id = new Column.Integer("account_id") { not_null = true };
public Column<string> key = new Column.Text("key") { not_null = true };
public Column<string> value = new Column.Text("value");
internal AccountSettingsTable(Database db) {
base(db, "account_settings");
init({id, account_id, key, value});
unique({account_id, key}, "REPLACE");
}
public string? get_value(int account_id, string key) {
var row_opt = select({value})
.with(this.account_id, "=", account_id)
.with(this.key, "=", key)
.single()
.row();
if (row_opt.is_present()) return row_opt[value];
return null;
}
}
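
AccountSettingsTable stores per-account key/value pairs and declares unique({account_id, key}, "REPLACE"), so writing a setting is an upsert keyed on the (account_id, key) pair. A usage sketch mirroring the Qlite patterns used elsewhere in this file (the setting key and value are made up for illustration):

// Read a setting, null if it was never stored:
string? nick = db.account_settings.get_value(account.id, "muc_default_nick");

// Write or overwrite it; account_id and key are the key columns of the upsert:
db.account_settings.upsert()
    .value(db.account_settings.account_id, account.id, true)
    .value(db.account_settings.key, "muc_default_nick", true)
    .value(db.account_settings.value, "juliet")
    .perform();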
public class ConversationSettingsTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> conversation_id = new Column.Integer("conversation_id") {not_null=true};
@ -446,9 +372,6 @@ public class Database : Qlite.Database {
public RealJidTable real_jid { get; private set; }
public OccupantIdTable occupantid { get; private set; }
public FileTransferTable file_transfer { get; private set; }
public FileHashesTable file_hashes { get; private set; }
public FileThumbnailsTable file_thumbnails { get; private set; }
public SourcesTable sfs_sources { get; private set; }
public CallTable call { get; private set; }
public CallCounterpartTable call_counterpart { get; private set; }
public ConversationTable conversation { get; private set; }
@ -459,7 +382,6 @@ public class Database : Qlite.Database {
public MamCatchupTable mam_catchup { get; private set; }
public ReactionTable reaction { get; private set; }
public SettingsTable settings { get; private set; }
public AccountSettingsTable account_settings { get; private set; }
public ConversationSettingsTable conversation_settings { get; private set; }
public Map<int, Jid> jid_table_cache = new HashMap<int, Jid>();
@ -479,9 +401,6 @@ public class Database : Qlite.Database {
occupantid = new OccupantIdTable(this);
real_jid = new RealJidTable(this);
file_transfer = new FileTransferTable(this);
file_hashes = new FileHashesTable(this);
file_thumbnails = new FileThumbnailsTable(this);
sfs_sources = new SourcesTable(this);
call = new CallTable(this);
call_counterpart = new CallCounterpartTable(this);
conversation = new ConversationTable(this);
@ -492,9 +411,8 @@ public class Database : Qlite.Database {
mam_catchup = new MamCatchupTable(this);
reaction = new ReactionTable(this);
settings = new SettingsTable(this);
account_settings = new AccountSettingsTable(this);
conversation_settings = new ConversationSettingsTable(this);
init({ account, jid, entity, content_item, message, body_meta, message_correction, reply, real_jid, occupantid, file_transfer, file_hashes, file_thumbnails, sfs_sources, call, call_counterpart, conversation, avatar, entity_identity, entity_feature, roster, mam_catchup, reaction, settings, account_settings, conversation_settings });
init({ account, jid, entity, content_item, message, body_meta, message_correction, reply, real_jid, occupantid, file_transfer, call, call_counterpart, conversation, avatar, entity_identity, entity_feature, roster, mam_catchup, reaction, settings, conversation_settings });
try {
exec("PRAGMA journal_mode = WAL");
@ -652,9 +570,6 @@ public class Database : Qlite.Database {
foreach(Row row in account.select()) {
try {
Account account = new Account.from_row(this, row);
if (account_table_cache.has_key(account.id)) {
account = account_table_cache[account.id];
}
ret.add(account);
account_table_cache[account.id] = account;
} catch (InvalidJidError e) {

View File

@ -90,20 +90,6 @@ public class EntityInfo : StreamInteractionModule, Object {
return info_result.features.contains(feature);
}
public bool has_feature_offline(Account account, Jid jid, string feature) {
int ret = has_feature_cached_int(account, jid, feature);
if (ret == -1) {
return db.entity.select()
.with(db.entity.account_id, "=", account.id)
.with(db.entity.jid_id, "=", db.get_jid_id(jid))
.with(db.entity.resource, "=", jid.resourcepart ?? "")
.join_with(db.entity_feature, db.entity.caps_hash, db.entity_feature.entity)
.with(db.entity_feature.feature, "=", feature)
.count() > 0;
}
return ret == 1;
}
public bool has_feature_cached(Account account, Jid jid, string feature) {
return has_feature_cached_int(account, jid, feature) == 1;
}
@ -217,24 +203,13 @@ public class EntityInfo : StreamInteractionModule, Object {
ServiceDiscovery.InfoResult? info_result = yield stream.get_module(ServiceDiscovery.Module.IDENTITY).request_info(stream, jid);
if (info_result == null) return null;
var computed_hash = EntityCapabilities.Module.compute_hash_for_info_result(info_result);
if (hash == null || computed_hash == hash) {
db.entity.upsert()
.value(db.entity.account_id, account.id, true)
.value(db.entity.jid_id, db.get_jid_id(jid), true)
.value(db.entity.resource, jid.resourcepart ?? "", true)
.value(db.entity.last_seen, (long)(new DateTime.now_local()).to_unix())
.value(db.entity.caps_hash, computed_hash)
.perform();
store_features(computed_hash, info_result.features);
store_identities(computed_hash, info_result.identities);
if (hash != null && EntityCapabilities.Module.compute_hash_for_info_result(info_result) == hash) {
store_features(hash, info_result.features);
store_identities(hash, info_result.identities);
} else {
warning("Claimed entity caps hash from %s doesn't match computed one", jid.to_string());
jid_features[jid] = info_result.features;
jid_identity[jid] = info_result.identities;
}
jid_features[jid] = info_result.features;
jid_identity[jid] = info_result.identities;
return info_result;
}
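
compute_hash_for_info_result() is not part of this hunk; per XEP-0115 the verification string is formed from the sorted identities and features of the disco#info result, hashed with SHA-1 and Base64-encoded, and only a matching hash is trusted for caching. A simplified sketch of that computation (not Dino's code; extended service-discovery forms and the exact identity ordering rules are glossed over):

public static string compute_caps_ver(Gee.List<string> identities, Gee.List<string> features) {
    // identities are assumed pre-formatted as "category/type/lang/name"
    identities.sort();
    features.sort();
    var builder = new StringBuilder();
    foreach (string identity in identities) builder.append(identity).append("<");
    foreach (string feature in features) builder.append(feature).append("<");
    string s = builder.str;
    var checksum = new Checksum(ChecksumType.SHA1);
    checksum.update(s.data, s.data.length);
    uint8[] digest = new uint8[20];
    size_t digest_len = 20;
    checksum.get_digest(digest, ref digest_len);
    return Base64.encode(digest);
}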

View File

@ -46,9 +46,20 @@ public class Dino.FallbackBody : StreamInteractionModule, Object {
if (fallbacks.is_empty) return false;
foreach (var fallback in fallbacks) {
if (fallback.ns_uri != Xep.Replies.NS_URI) continue; // TODO what if it's not
if (fallback.ns_uri != Xep.Replies.NS_URI) continue;
foreach (var location in fallback.locations) {
db.body_meta.insert()
.value(db.body_meta.message_id, message.id)
.value(db.body_meta.info_type, Xep.FallbackIndication.NS_URI)
.value(db.body_meta.info, fallback.ns_uri)
.value(db.body_meta.from_char, location.from_char)
.value(db.body_meta.to_char, location.to_char)
.perform();
}
message.set_fallbacks(fallbacks);
}
message.set_fallbacks(fallbacks);
return false;
}
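
Each body_meta row above records which character range of the message body is fallback text (the quoted original for XEP-0461 replies), so a client that understands replies can cut that range out before rendering. A sketch of the stripping step, assuming from_char/to_char are Unicode character offsets with an exclusive end, as in XEP-0428:

public static string strip_fallback_range(string body, int from_char, int to_char) {
    // Convert character offsets into byte offsets before slicing the UTF-8 string.
    int from_byte = body.index_of_nth_char(from_char);
    int to_byte = body.index_of_nth_char(to_char);
    return body.substring(0, from_byte) + body.substring(to_byte);
}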

View File

@ -2,7 +2,6 @@ using Gdk;
using Gee;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
namespace Dino {
@ -20,12 +19,6 @@ public class FileManager : StreamInteractionModule, Object {
private Gee.List<FileEncryptor> file_encryptors = new ArrayList<FileEncryptor>();
private Gee.List<FileDecryptor> file_decryptors = new ArrayList<FileDecryptor>();
private Gee.List<FileProvider> file_providers = new ArrayList<FileProvider>();
private Gee.List<FileMetadataProvider> file_metadata_providers = new ArrayList<FileMetadataProvider>();
public StatelessFileSharing sfs {
owned get { return stream_interactor.get_module(StatelessFileSharing.IDENTITY); }
private set { }
}
public static void start(StreamInteractor stream_interactor, Database db) {
FileManager m = new FileManager(stream_interactor, db);
@ -43,24 +36,6 @@ public class FileManager : StreamInteractionModule, Object {
this.add_provider(new JingleFileProvider(stream_interactor));
this.add_sender(new JingleFileSender(stream_interactor));
this.add_metadata_provider(new GenericFileMetadataProvider());
this.add_metadata_provider(new ImageFileMetadataProvider());
}
public const int HTTP_PROVIDER_ID = 0;
public const int SFS_PROVIDER_ID = 2;
public FileProvider? select_file_provider(FileTransfer file_transfer) {
bool http_usable = file_transfer.provider == SFS_PROVIDER_ID;
foreach (FileProvider file_provider in this.file_providers) {
if (file_transfer.provider == file_provider.get_id()) {
return file_provider;
}
if (http_usable && file_provider.get_id() == HTTP_PROVIDER_ID) {
return file_provider;
}
}
return null;
}
public async HashMap<int, long> get_file_size_limits(Conversation conversation) {
@ -85,15 +60,11 @@ public class FileManager : StreamInteractionModule, Object {
file_transfer.local_time = new DateTime.now_utc();
file_transfer.encryption = conversation.encryption;
Xep.FileMetadataElement.FileMetadata metadata = new Xep.FileMetadataElement.FileMetadata();
foreach (FileMetadataProvider file_metadata_provider in this.file_metadata_providers) {
if (file_metadata_provider.supports_file(file)) {
yield file_metadata_provider.fill_metadata(file, metadata);
}
}
file_transfer.file_metadata = metadata;
try {
FileInfo file_info = file.query_info("*", FileQueryInfoFlags.NONE);
file_transfer.file_name = file_info.get_display_name();
file_transfer.mime_type = file_info.get_content_type();
file_transfer.size = (int)file_info.get_size();
file_transfer.input_stream = yield file.read_async();
yield save_file(file_transfer);
@ -148,20 +119,7 @@ public class FileManager : StreamInteractionModule, Object {
file_send_data = file_encryptor.preprocess_send_file(conversation, file_transfer, file_send_data, file_meta);
}
file_transfer.state = FileTransfer.State.IN_PROGRESS;
// Update current download progress in the FileTransfer
LimitInputStream? limit_stream = file_transfer.input_stream as LimitInputStream;
if (limit_stream == null) {
limit_stream = new LimitInputStream(file_transfer.input_stream, file_meta.size);
file_transfer.input_stream = limit_stream;
}
if (limit_stream != null) {
limit_stream.bind_property("retrieved-bytes", file_transfer, "transferred-bytes", BindingFlags.SYNC_CREATE);
}
yield file_sender.send_file(conversation, file_transfer, file_send_data, file_meta);
file_transfer.state = FileTransfer.State.COMPLETE;
} catch (Error e) {
warning("Send file error: %s", e.message);
@ -172,7 +130,12 @@ public class FileManager : StreamInteractionModule, Object {
public async void download_file(FileTransfer file_transfer) {
Conversation conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(file_transfer.counterpart.bare_jid, file_transfer.account);
FileProvider? file_provider = this.select_file_provider(file_transfer);
FileProvider? file_provider = null;
foreach (FileProvider fp in file_providers) {
if (file_transfer.provider == fp.get_id()) {
file_provider = fp;
}
}
yield download_file_internal(file_provider, file_transfer, conversation);
}
@ -211,10 +174,6 @@ public class FileManager : StreamInteractionModule, Object {
file_decryptors.add(decryptor);
}
public void add_metadata_provider(FileMetadataProvider file_metadata_provider) {
file_metadata_providers.add(file_metadata_provider);
}
public bool is_sender_trustworthy(FileTransfer file_transfer, Conversation conversation) {
if (file_transfer.direction == FileTransfer.DIRECTION_SENT) return true;
@ -252,11 +211,7 @@ public class FileManager : StreamInteractionModule, Object {
private async void download_file_internal(FileProvider file_provider, FileTransfer file_transfer, Conversation conversation) {
try {
// Get meta info
FileReceiveData? receive_data = file_provider.get_file_receive_data(file_transfer);
if (receive_data == null) {
warning("Don't have download data (yet)");
return;
}
FileReceiveData receive_data = file_provider.get_file_receive_data(file_transfer);
FileDecryptor? file_decryptor = null;
foreach (FileDecryptor decryptor in file_decryptors) {
if (decryptor.can_decrypt_file(conversation, file_transfer, receive_data)) {
@ -271,6 +226,9 @@ public class FileManager : StreamInteractionModule, Object {
FileMeta file_meta = yield get_file_meta(file_provider, file_transfer, conversation, receive_data);
InputStream? input_stream = null;
// Download and decrypt file
file_transfer.state = FileTransfer.State.IN_PROGRESS;
@ -278,23 +236,15 @@ public class FileManager : StreamInteractionModule, Object {
file_meta = file_decryptor.prepare_download_file(conversation, file_transfer, receive_data, file_meta);
}
InputStream download_input_stream = yield file_provider.download(file_transfer, receive_data, file_meta);
InputStream input_stream = download_input_stream;
input_stream = yield file_provider.download(file_transfer, receive_data, file_meta);
if (file_decryptor != null) {
input_stream = yield file_decryptor.decrypt_file(input_stream, conversation, file_transfer, receive_data);
}
// Update current download progress in the FileTransfer
LimitInputStream? limit_stream = download_input_stream as LimitInputStream;
if (limit_stream != null) {
limit_stream.bind_property("retrieved-bytes", file_transfer, "transferred-bytes", BindingFlags.SYNC_CREATE);
}
// Save file
string filename = Random.next_int().to_string("%x") + "_" + file_transfer.file_name;
File file = File.new_for_path(Path.build_filename(get_storage_dir(), filename));
// libsoup doesn't properly support splicing
OutputStream os = file.create(FileCreateFlags.REPLACE_DESTINATION);
uint8[] buffer = new uint8[1024];
ssize_t read;
@ -305,49 +255,20 @@ public class FileManager : StreamInteractionModule, Object {
}
yield input_stream.close_async(Priority.LOW, file_transfer.cancellable);
yield os.close_async(Priority.LOW, file_transfer.cancellable);
// Verify the hash of the downloaded file, if it is known
var supported_hashes = Xep.CryptographicHashes.get_supported_hashes(file_transfer.hashes);
if (!supported_hashes.is_empty) {
var checksum_types = new ArrayList<ChecksumType>();
var hashes = new HashMap<ChecksumType, string>();
foreach (var hash in supported_hashes) {
var checksum_type = Xep.CryptographicHashes.hash_string_to_type(hash.algo);
checksum_types.add(checksum_type);
hashes[checksum_type] = hash.val;
}
var computed_hashes = yield compute_file_hashes(file, checksum_types);
foreach (var checksum_type in hashes.keys) {
if (hashes[checksum_type] != computed_hashes[checksum_type]) {
warning("Hash of downloaded file does not equal advertised hash, discarding: %s. %s should be %s, was %s",
file_transfer.file_name, checksum_type.to_string(), hashes[checksum_type], computed_hashes[checksum_type]);
FileUtils.remove(file.get_path());
file_transfer.state = FileTransfer.State.FAILED;
return;
}
}
}
file_transfer.path = file.get_basename();
file_transfer.input_stream = yield file.read_async();
FileInfo file_info = file_transfer.get_file().query_info("*", FileQueryInfoFlags.NONE);
file_transfer.mime_type = file_info.get_content_type();
file_transfer.state = FileTransfer.State.COMPLETE;
} catch (IOError.CANCELLED e) {
print("cancelled\n");
} catch (Error e) {
warning("Error downloading file: %s", e.message);
if (file_transfer.provider == 0 || file_transfer.provider == FileManager.SFS_PROVIDER_ID) {
file_transfer.state = FileTransfer.State.NOT_STARTED;
} else {
file_transfer.state = FileTransfer.State.FAILED;
}
file_transfer.state = FileTransfer.State.FAILED;
}
}
public FileTransfer create_file_transfer_from_provider_incoming(FileProvider file_provider, string info, Jid from, DateTime time, DateTime local_time, Conversation conversation, FileReceiveData receive_data, FileMeta file_meta) {
private async void handle_incoming_file(FileProvider file_provider, string info, Jid from, DateTime time, DateTime local_time, Conversation conversation, FileReceiveData receive_data, FileMeta file_meta) {
FileTransfer file_transfer = new FileTransfer();
file_transfer.account = conversation.account;
file_transfer.counterpart = file_transfer.direction == FileTransfer.DIRECTION_RECEIVED ? from : conversation.counterpart;
@ -355,13 +276,8 @@ public class FileManager : StreamInteractionModule, Object {
file_transfer.ourpart = stream_interactor.get_module(MucManager.IDENTITY).get_own_jid(conversation.counterpart, conversation.account) ?? conversation.account.bare_jid;
file_transfer.direction = from.equals(file_transfer.ourpart) ? FileTransfer.DIRECTION_SENT : FileTransfer.DIRECTION_RECEIVED;
} else {
if (from.equals_bare(conversation.account.bare_jid)) {
file_transfer.ourpart = from;
file_transfer.direction = FileTransfer.DIRECTION_SENT;
} else {
file_transfer.ourpart = conversation.account.full_jid;
file_transfer.direction = FileTransfer.DIRECTION_RECEIVED;
}
file_transfer.ourpart = conversation.account.full_jid;
file_transfer.direction = from.equals_bare(file_transfer.ourpart) ? FileTransfer.DIRECTION_SENT : FileTransfer.DIRECTION_RECEIVED;
}
file_transfer.time = time;
file_transfer.local_time = local_time;
@ -379,11 +295,6 @@ public class FileManager : StreamInteractionModule, Object {
}
}
return file_transfer;
}
private async void handle_incoming_file(FileProvider file_provider, string info, Jid from, DateTime time, DateTime local_time, Conversation conversation, FileReceiveData receive_data, FileMeta file_meta) {
FileTransfer file_transfer = create_file_transfer_from_provider_incoming(file_provider, info, from, time, local_time, conversation, receive_data, file_meta);
stream_interactor.get_module(FileTransferStorage.IDENTITY).add_file(file_transfer);
if (is_sender_trustworthy(file_transfer, conversation)) {
@ -409,10 +320,10 @@ public class FileManager : StreamInteractionModule, Object {
string filename = Random.next_int().to_string("%x") + "_" + file_transfer.file_name;
File file = File.new_for_path(Path.build_filename(get_storage_dir(), filename));
OutputStream os = file.create(FileCreateFlags.REPLACE_DESTINATION);
yield os.splice_async(file_transfer.input_stream, OutputStreamSpliceFlags.CLOSE_SOURCE | OutputStreamSpliceFlags.CLOSE_TARGET);
yield os.splice_async(file_transfer.input_stream, OutputStreamSpliceFlags.CLOSE_SOURCE|OutputStreamSpliceFlags.CLOSE_TARGET);
file_transfer.state = FileTransfer.State.COMPLETE;
file_transfer.path = filename;
file_transfer.input_stream = new LimitInputStream(yield file.read_async(), file_transfer.size);
file_transfer.input_stream = yield file.read_async();
} catch (Error e) {
throw new FileSendError.SAVE_FAILED("Saving file error: %s".printf(e.message));
}
@ -425,10 +336,10 @@ public errordomain FileSendError {
SAVE_FAILED
}
// Get rid of this Error and pass IoErrors instead - DOWNLOAD_FAILED already removed
public errordomain FileReceiveError {
GET_METADATA_FAILED,
DECRYPTION_FAILED
DECRYPTION_FAILED,
DOWNLOAD_FAILED
}
public class FileMeta {
@ -466,7 +377,7 @@ public interface FileProvider : Object {
public abstract FileReceiveData? get_file_receive_data(FileTransfer file_transfer);
public abstract async FileMeta get_meta_info(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws FileReceiveError;
public abstract async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws IOError;
public abstract async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws FileReceiveError;
public abstract int get_id();
}
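
compute_file_hashes() is referenced by the verification code above but not shown in this hunk; conceptually it streams the downloaded file once and feeds every requested GLib ChecksumType. A possible shape (an assumption, not necessarily the real implementation, and the hex output of get_string() would have to match the encoding of the advertised hash values):

public static async Gee.HashMap<ChecksumType, string> compute_file_hashes(File file, Gee.List<ChecksumType> types) throws Error {
    // One Checksum per requested algorithm, all updated in a single pass over the file.
    var checksums = new Checksum[types.size];
    for (int i = 0; i < types.size; i++) checksums[i] = new Checksum(types[i]);

    FileInputStream stream = yield file.read_async();
    uint8 buf[4096];
    ssize_t read;
    while ((read = yield stream.read_async(buf)) > 0) {
        foreach (unowned Checksum checksum in checksums) checksum.update(buf, (size_t) read);
    }

    var result = new Gee.HashMap<ChecksumType, string>();
    for (int i = 0; i < types.size; i++) result[types[i]] = checksums[i].get_string();
    return result;
}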

View File

@ -14,8 +14,6 @@ namespace Dino {
private Database db;
private WeakMap<int, FileTransfer> files_by_db_id = new WeakMap<int, FileTransfer>();
private WeakMap<int, FileTransfer> files_by_message_id = new WeakMap<int, FileTransfer>();
private WeakMap<string, FileTransfer> files_by_message_and_file_id = new WeakMap<string, FileTransfer>();
public static void start(StreamInteractor stream_interactor, Database db) {
FileTransferStorage m = new FileTransferStorage(stream_interactor, db);
@ -43,42 +41,6 @@ namespace Dino {
return create_file_from_row_opt(row_option, conversation);
}
// Http file transfers store the corresponding message id in the `info` field
public FileTransfer? get_file_by_message_id(int id, Conversation conversation) {
FileTransfer? file_transfer = files_by_message_id[id];
if (file_transfer != null) {
return file_transfer;
}
RowOption row_option = db.file_transfer.select()
.with(db.file_transfer.info, "=", id.to_string())
.single()
.row();
return create_file_from_row_opt(row_option, conversation);
}
public FileTransfer get_files_by_message_and_file_id(int message_id, string file_sharing_id, Conversation conversation) {
string combined_identifier = message_id.to_string() + file_sharing_id;
FileTransfer? file_transfer = files_by_message_and_file_id[combined_identifier];
if (file_transfer == null) {
RowOption row_option = db.file_transfer.select()
.with(db.file_transfer.info, "=", message_id.to_string())
.with(db.file_transfer.file_sharing_id, "=", file_sharing_id)
.single()
.row();
file_transfer = create_file_from_row_opt(row_option, conversation);
}
// There can be collisions in the combined identifier, check it's the correct FileTransfer
if (file_transfer != null && file_transfer.info == message_id.to_string() && file_transfer.file_sharing_id == file_sharing_id) {
return file_transfer;
}
return null;
}
private FileTransfer? create_file_from_row_opt(RowOption row_opt, Conversation conversation) {
if (!row_opt.is_present()) return null;
@ -99,15 +61,6 @@ namespace Dino {
private void cache_file(FileTransfer file_transfer) {
files_by_db_id[file_transfer.id] = file_transfer;
if (file_transfer.info != null && file_transfer.info != "") {
files_by_message_id[int.parse(file_transfer.info)] = file_transfer;
if (file_transfer.file_sharing_id != null && file_transfer.info != null) {
string combined_identifier = file_transfer.info + file_transfer.file_sharing_id;
files_by_message_and_file_id[combined_identifier] = file_transfer;
}
}
}
}
}
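
The cache key used above is plain concatenation (message_id.to_string() + file_sharing_id), so distinct pairs can collide ("1" + "23ab" and "12" + "3ab" both give "123ab"); that is why the code re-checks info and file_sharing_id before returning a cache hit. Inserting a separator that cannot occur in a numeric message id would make the key unambiguous; a sketch of that variant:

// Hypothetical alternative key: '|' cannot appear in the numeric message id,
// so every (message_id, file_sharing_id) pair maps to a distinct string.
private static string combined_identifier(int message_id, string file_sharing_id) {
    return message_id.to_string() + "|" + file_sharing_id;
}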

View File

@ -90,9 +90,11 @@ public class Dino.HistorySync {
if (!is_muc_mam && !from_our_server) return;
// Get the server time of the message and store it in `mam_times`
string? id = message.stanza.get_deep_attribute(Xmpp.MessageArchiveManagement.NS_URI + ":result", "id");
Xmpp.MessageArchiveManagement.Flag? mam_flag = stream != null ? stream.get_flag(Xmpp.MessageArchiveManagement.Flag.IDENTITY) : null;
if (mam_flag == null) return;
string? id = message.stanza.get_deep_attribute(mam_flag.ns_ver + ":result", "id");
if (id == null) return;
StanzaNode? delay_node = message.stanza.get_deep_subnode(Xmpp.MessageArchiveManagement.NS_URI + ":result", StanzaForwarding.NS_URI + ":forwarded", DelayedDelivery.NS_URI + ":delay");
StanzaNode? delay_node = message.stanza.get_deep_subnode(mam_flag.ns_ver + ":result", StanzaForwarding.NS_URI + ":forwarded", DelayedDelivery.NS_URI + ":delay");
if (delay_node == null) {
warning("MAM result did not contain delayed time %s", message.stanza.to_string());
return;
@ -102,7 +104,7 @@ public class Dino.HistorySync {
mam_times[account][id] = time;
// Check if this is the target message
string? query_id = message.stanza.get_deep_attribute(Xmpp.MessageArchiveManagement.NS_URI + ":result", Xmpp.MessageArchiveManagement.NS_URI + ":queryid");
string? query_id = message.stanza.get_deep_attribute(mam_flag.ns_ver + ":result", mam_flag.ns_ver + ":queryid");
if (query_id != null && id == catchup_until_id[account]) {
debug("[%s] Hitted range (id) %s", account.bare_jid.to_string(), id);
hitted_range[query_id] = -2;
@ -161,7 +163,7 @@ public class Dino.HistorySync {
if (current_row[db.mam_catchup.from_end]) return;
debug("[%s] Fetching between ranges %s - %s", mam_server.to_string(), previous_row[db.mam_catchup.to_time].to_string(), current_row[db.mam_catchup.from_time].to_string());
current_row = yield fetch_between_ranges(account, mam_server, previous_row, current_row, cancellable);
current_row = yield fetch_between_ranges(account, mam_server, previous_row, current_row);
if (current_row == null) return;
RowOption previous_row_opt = db.mam_catchup.select()
@ -212,11 +214,13 @@ public class Dino.HistorySync {
return null;
}
// If we get PageResult.Duplicate, we still want to update the db row to the latest message.
// Catchup finished within first page. Update latest db entry.
if (latest_row_id != -1 &&
page_result.page_result in new PageResult[] { PageResult.TargetReached, PageResult.NoMoreMessages }) {
page_result.page_result in new PageResult[] { PageResult.TargetReached, PageResult.NoMoreMessages, PageResult.Duplicate }) {
if (page_result.stanzas == null) return null;
if (page_result.stanzas == null || page_result.stanzas.is_empty) return null;
string latest_mam_id = page_result.query_result.last;
long latest_mam_time = (long) mam_times[account][latest_mam_id].to_unix();
@ -268,7 +272,7 @@ public class Dino.HistorySync {
** Merges the `earlier_range` db row into the `later_range` db row.
** @return The resulting range comprising `earlier_range`, `later_rage`, and everything in between. null if fetching/merge failed.
**/
private async Row? fetch_between_ranges(Account account, Jid mam_server, Row earlier_range, Row later_range, Cancellable? cancellable = null) {
private async Row? fetch_between_ranges(Account account, Jid mam_server, Row earlier_range, Row later_range) {
int later_range_id = (int) later_range[db.mam_catchup.id];
DateTime earliest_time = new DateTime.from_unix_utc(earlier_range[db.mam_catchup.to_time]);
DateTime latest_time = new DateTime.from_unix_utc(later_range[db.mam_catchup.from_time]);
@ -278,9 +282,9 @@ public class Dino.HistorySync {
earliest_time, earlier_range[db.mam_catchup.to_id],
latest_time, later_range[db.mam_catchup.from_id]);
PageRequestResult page_result = yield fetch_query(account, query_params, later_range_id, cancellable);
PageRequestResult page_result = yield fetch_query(account, query_params, later_range_id);
if (page_result.page_result == PageResult.TargetReached || page_result.page_result == PageResult.NoMoreMessages) {
if (page_result.page_result == PageResult.TargetReached) {
debug("[%s | %s] Merging range %i into %i", account.bare_jid.to_string(), mam_server.to_string(), earlier_range[db.mam_catchup.id], later_range_id);
// Merge earlier range into later one.
db.mam_catchup.update()
@ -326,9 +330,9 @@ public class Dino.HistorySync {
PageRequestResult? page_result = null;
do {
page_result = yield get_mam_page(account, query_params, page_result, cancellable);
debug("[%s | %s] Page result %s (got stanzas: %s)", account.bare_jid.to_string(), query_params.mam_server.to_string(), page_result.page_result.to_string(), (page_result.stanzas != null).to_string());
debug("Page result %s %b", page_result.page_result.to_string(), page_result.stanzas == null);
if (page_result.page_result == PageResult.Error || page_result.page_result == PageResult.Cancelled || page_result.query_result.first == null) return page_result;
if (page_result.page_result == PageResult.Error || page_result.page_result == PageResult.Cancelled || page_result.stanzas == null) return page_result;
string earliest_mam_id = page_result.query_result.first;
long earliest_mam_time = (long)mam_times[account][earliest_mam_id].to_unix();
@ -353,6 +357,7 @@ public class Dino.HistorySync {
MorePagesAvailable,
TargetReached,
NoMoreMessages,
Duplicate,
Error,
Cancelled
}
@ -388,28 +393,29 @@ public class Dino.HistorySync {
page_result = PageResult.NoMoreMessages;
}
string selection = null;
string[] selection_args = {};
string query_id = query_params.query_id;
string? after_id = query_params.start_id;
var stanzas_for_query = stanzas.has_key(query_id) && !stanzas[query_id].is_empty ? stanzas[query_id] : null;
if (cancellable != null && cancellable.is_cancelled()) {
stanzas.unset(query_id);
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas[query_id]);
}
if (stanzas_for_query != null) {
if (stanzas.has_key(query_id) && !stanzas[query_id].is_empty) {
// Check if we reached our target (from_id)
foreach (Xmpp.MessageStanza message in stanzas_for_query) {
foreach (Xmpp.MessageStanza message in stanzas[query_id]) {
Xmpp.MessageArchiveManagement.MessageFlag? mam_message_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message);
if (mam_message_flag != null && mam_message_flag.mam_id != null) {
if (after_id != null && mam_message_flag.mam_id == after_id) {
// Successfully fetched the whole range
yield send_messages_back_into_pipeline(account, query_id, cancellable);
if (cancellable != null && cancellable.is_cancelled()) {
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas[query_id]);
}
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas_for_query);
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas[query_id]);
}
}
}
@ -417,9 +423,37 @@ public class Dino.HistorySync {
// Message got filtered out by xmpp-vala, but successful range fetch nevertheless
yield send_messages_back_into_pipeline(account, query_id);
if (cancellable != null && cancellable.is_cancelled()) {
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas[query_id]);
}
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas_for_query);
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas[query_id]);
}
// Check for duplicates. Go through all messages and build a db query.
foreach (Xmpp.MessageStanza message in stanzas[query_id]) {
Xmpp.MessageArchiveManagement.MessageFlag? mam_message_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message);
if (mam_message_flag != null && mam_message_flag.mam_id != null) {
if (selection == null) {
selection = @"$(db.message.server_id) = ?";
} else {
selection += @" OR $(db.message.server_id) = ?";
}
selection_args += mam_message_flag.mam_id;
}
}
var duplicates_qry = db.message.select()
.with(db.message.account_id, "=", account.id)
.where(selection, selection_args);
// We don't want messages from different MAM servers to interfere with each other.
if (!query_params.mam_server.equals_bare(account.bare_jid)) {
duplicates_qry.with(db.message.counterpart_id, "=", db.get_jid_id(query_params.mam_server));
} else {
duplicates_qry.with(db.message.type_, "=", Message.Type.CHAT);
}
var duplicates_count = duplicates_qry.count();
if (duplicates_count > 0) {
// We got a duplicate although we thought we have to catch up.
// There was a server bug where prosody would send all messages if it didn't know the after ID that was given
page_result = PageResult.Duplicate;
}
}
@ -427,7 +461,7 @@ public class Dino.HistorySync {
if (cancellable != null && cancellable.is_cancelled()) {
page_result = PageResult.Cancelled;
}
return new PageRequestResult(page_result, query_result, stanzas_for_query);
return new PageRequestResult(page_result, query_result, stanzas.has_key(query_id) ? stanzas[query_id] : null);
}
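
The duplicate check above assembles one WHERE clause with a ? placeholder per MAM id and a matching argument array. The same logic extracted into a helper (hypothetical, but using the Qlite string-template style of the surrounding code):

// Returns e.g. "server_id = ? OR server_id = ?" plus the argument list, or null if no ids were given.
private static string? build_server_id_selection(Database db, Gee.List<string> mam_ids, out string[] args) {
    string[] collected = {};
    string? selection = null;
    foreach (string mam_id in mam_ids) {
        selection = selection == null ? @"$(db.message.server_id) = ?" : selection + @" OR $(db.message.server_id) = ?";
        collected += mam_id;
    }
    args = collected;
    return selection;
}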
private async void send_messages_back_into_pipeline(Account account, string query_id, Cancellable? cancellable = null) {

View File

@ -95,15 +95,19 @@ public class JingleFileProvider : FileProvider, Object {
return Encryption.NONE;
}
public async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws IOError {
public async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws FileReceiveError {
// TODO(hrxi) What should happen if `stream == null`?
XmppStream? stream = stream_interactor.get_stream(file_transfer.account);
Xmpp.Xep.JingleFileTransfer.FileTransfer? jingle_file_transfer = file_transfers[file_transfer.info];
if (jingle_file_transfer == null) {
throw new IOError.FAILED("Transfer data not available anymore");
throw new FileReceiveError.DOWNLOAD_FAILED("Transfer data not available anymore");
}
yield jingle_file_transfer.accept(stream);
return new LimitInputStream(jingle_file_transfer.stream, file_meta.size);
try {
yield jingle_file_transfer.accept(stream);
} catch (IOError e) {
throw new FileReceiveError.DOWNLOAD_FAILED("Establishing connection did not work");
}
return jingle_file_transfer.stream;
}
public int get_id() {

View File

@ -39,21 +39,27 @@ public class MessageCorrection : StreamInteractionModule, MessageListener {
});
}
public void set_correction(Conversation conversation, Message message, Message old_message) {
string reference_stanza_id = old_message.edit_to ?? old_message.stanza_id;
public void send_correction(Conversation conversation, Message old_message, string correction_text) {
string stanza_id = old_message.edit_to ?? old_message.stanza_id;
outstanding_correction_nodes[message.stanza_id] = reference_stanza_id;
Message out_message = stream_interactor.get_module(MessageProcessor.IDENTITY).create_out_message(correction_text, conversation);
out_message.edit_to = stanza_id;
out_message.quoted_item_id = old_message.quoted_item_id;
outstanding_correction_nodes[out_message.stanza_id] = stanza_id;
stream_interactor.get_module(MessageProcessor.IDENTITY).send_xmpp_message(out_message, conversation);
db.message_correction.insert()
.value(db.message_correction.message_id, message.id)
.value(db.message_correction.to_stanza_id, reference_stanza_id)
.perform();
.value(db.message_correction.message_id, out_message.id)
.value(db.message_correction.to_stanza_id, stanza_id)
.perform();
db.content_item.update()
.with(db.content_item.foreign_id, "=", old_message.id)
.with(db.content_item.content_type, "=", 1)
.set(db.content_item.foreign_id, message.id)
.perform();
.with(db.content_item.foreign_id, "=", old_message.id)
.with(db.content_item.content_type, "=", 1)
.set(db.content_item.foreign_id, out_message.id)
.perform();
on_received_correction(conversation, out_message.id);
}
public bool is_own_correction_allowed(Conversation conversation, Message message) {
@ -139,7 +145,7 @@ public class MessageCorrection : StreamInteractionModule, MessageListener {
return false;
}
public void on_received_correction(Conversation conversation, int message_id) {
private void on_received_correction(Conversation conversation, int message_id) {
ContentItem? content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_foreign(conversation, 1, message_id);
if (content_item != null) {
received_correction(content_item);
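send_correction() above derives the referenced id with old_message.edit_to ?? old_message.stanza_id. The reason for the coalescing is that every correction (XEP-0308) has to point at the original message, even when the user corrects an earlier correction. A stripped-down sketch of that chaining rule (this Message class is a stand-in for illustration, not Dino's entity):

class Message {
    public string stanza_id;
    public string? edit_to;   // id of the original message, if this is a correction
    public Message (string stanza_id, string? edit_to = null) {
        this.stanza_id = stanza_id;
        this.edit_to = edit_to;
    }
}

string reference_id (Message old_message) {
    return old_message.edit_to ?? old_message.stanza_id;
}

int main () {
    var original = new Message ("id-1");
    var first_fix = new Message ("id-2", reference_id (original));    // references id-1
    var second_fix = new Message ("id-3", reference_id (first_fix));  // still references id-1
    assert (first_fix.edit_to == "id-1");
    assert (second_fix.edit_to == "id-1");
    print ("all corrections reference the original: %s\n", second_fix.edit_to);
    return 0;
}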

View File

@ -38,7 +38,7 @@ public class MessageProcessor : StreamInteractionModule, Object {
received_pipeline.connect(new FilterMessageListener());
received_pipeline.connect(new StoreMessageListener(this, stream_interactor));
received_pipeline.connect(new StoreContentItemListener(stream_interactor));
received_pipeline.connect(new MarkupListener(stream_interactor));
received_pipeline.connect(new MamMessageListener(stream_interactor));
stream_interactor.account_added.connect(on_account_added);
@ -46,6 +46,18 @@ public class MessageProcessor : StreamInteractionModule, Object {
stream_interactor.stream_resumed.connect(send_unsent_chat_messages);
}
public Entities.Message send_text(string text, Conversation conversation) {
Entities.Message message = create_out_message(text, conversation);
return send_message(message, conversation);
}
public Entities.Message send_message(Entities.Message message, Conversation conversation) {
stream_interactor.get_module(ContentItemStore.IDENTITY).insert_message(message, conversation);
send_xmpp_message(message, conversation);
message_sent(message, conversation);
return message;
}
private void convert_sending_to_unsent_msgs(Account account) {
db.message.update()
.with(db.message.account_id, "=", account.id)
@ -156,22 +168,21 @@ public class MessageProcessor : StreamInteractionModule, Object {
new_message.counterpart = counterpart_override ?? (new_message.direction == Entities.Message.DIRECTION_SENT ? message.to : message.from);
new_message.ourpart = new_message.direction == Entities.Message.DIRECTION_SENT ? message.from : message.to;
XmppStream? stream = stream_interactor.get_stream(account);
Xmpp.MessageArchiveManagement.MessageFlag? mam_message_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message);
Xmpp.MessageArchiveManagement.Flag? mam_flag = stream != null ? stream.get_flag(Xmpp.MessageArchiveManagement.Flag.IDENTITY) : null;
EntityInfo entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
if (mam_message_flag != null && mam_message_flag.mam_id != null) {
bool server_does_mam = entity_info.has_feature_cached(account, account.bare_jid, Xmpp.MessageArchiveManagement.NS_URI);
if (server_does_mam) {
new_message.server_id = mam_message_flag.mam_id;
}
if (mam_message_flag != null && mam_flag != null && mam_flag.ns_ver == Xmpp.MessageArchiveManagement.NS_URI_2 && mam_message_flag.mam_id != null) {
new_message.server_id = mam_message_flag.mam_id;
} else if (message.type_ == Xmpp.MessageStanza.TYPE_GROUPCHAT) {
bool server_supports_sid = (yield entity_info.has_feature(account, new_message.counterpart.bare_jid, Xep.UniqueStableStanzaIDs.NS_URI)) ||
(yield entity_info.has_feature(account, new_message.counterpart.bare_jid, Xmpp.MessageArchiveManagement.NS_URI));
(yield entity_info.has_feature(account, new_message.counterpart.bare_jid, Xmpp.MessageArchiveManagement.NS_URI_2));
if (server_supports_sid) {
new_message.server_id = Xep.UniqueStableStanzaIDs.get_stanza_id(message, new_message.counterpart.bare_jid);
}
} else if (message.type_ == Xmpp.MessageStanza.TYPE_CHAT) {
bool server_supports_sid = (yield entity_info.has_feature(account, account.bare_jid, Xep.UniqueStableStanzaIDs.NS_URI)) ||
(yield entity_info.has_feature(account, account.bare_jid, Xmpp.MessageArchiveManagement.NS_URI));
(yield entity_info.has_feature(account, account.bare_jid, Xmpp.MessageArchiveManagement.NS_URI_2));
if (server_supports_sid) {
new_message.server_id = Xep.UniqueStableStanzaIDs.get_stanza_id(message, account.bare_jid);
}
@ -307,8 +318,7 @@ public class MessageProcessor : StreamInteractionModule, Object {
public override string[] after_actions { get { return after_actions_const; } }
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
return message.body == null &&
Xep.StatelessFileSharing.get_file_shares(stanza) == null;
return (message.body == null);
}
}
@ -327,30 +337,13 @@ public class MessageProcessor : StreamInteractionModule, Object {
}
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
if (message.body == null || outer.is_duplicate(message, stanza, conversation)) return true;
stream_interactor.get_module(MessageStorage.IDENTITY).add_message(message, conversation);
return false;
}
}
private class MarkupListener : MessageListener {
public string[] after_actions_const = new string[]{ "STORE" };
public override string action_group { get { return "Markup"; } }
public override string[] after_actions { get { return after_actions_const; } }
private StreamInteractor stream_interactor;
public MarkupListener(StreamInteractor stream_interactor) {
this.stream_interactor = stream_interactor;
}
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
Gee.List<MessageMarkup.Span> markups = MessageMarkup.get_spans(stanza);
message.persist_markups(markups, message.id);
return false;
}
}
private class StoreContentItemListener : MessageListener {
public string[] after_actions_const = new string[]{ "DEDUPLICATE", "DECRYPT", "FILTER_EMPTY", "STORE", "CORRECTION", "MESSAGE_REINTERPRETING" };
@ -370,7 +363,30 @@ public class MessageProcessor : StreamInteractionModule, Object {
}
}
public Entities.Message create_out_message(string? text, Conversation conversation) {
private class MamMessageListener : MessageListener {
public string[] after_actions_const = new string[]{ "DEDUPLICATE" };
public override string action_group { get { return "MAM_NODE"; } }
public override string[] after_actions { get { return after_actions_const; } }
private StreamInteractor stream_interactor;
public MamMessageListener(StreamInteractor stream_interactor) {
this.stream_interactor = stream_interactor;
}
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
bool is_mam_message = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(stanza) != null;
XmppStream? stream = stream_interactor.get_stream(conversation.account);
Xmpp.MessageArchiveManagement.Flag? mam_flag = stream != null ? stream.get_flag(Xmpp.MessageArchiveManagement.Flag.IDENTITY) : null;
if (is_mam_message || (mam_flag != null && mam_flag.cought_up == true)) {
conversation.account.mam_earliest_synced = message.local_time;
}
return false;
}
}
public Entities.Message create_out_message(string text, Conversation conversation) {
Entities.Message message = new Entities.Message(text);
message.type_ = Util.get_message_type_for_conversation(conversation);
message.stanza_id = random_uuid();
@ -413,22 +429,8 @@ public class MessageProcessor : StreamInteractionModule, Object {
new_message.type_ = MessageStanza.TYPE_CHAT;
}
if (message.quoted_item_id != 0) {
ContentItem? quoted_content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_id(conversation, message.quoted_item_id);
if (quoted_content_item != null) {
Jid? quoted_sender = message.from;
string? quoted_stanza_id = stream_interactor.get_module(ContentItemStore.IDENTITY).get_message_id_for_content_item(conversation, quoted_content_item);
if (quoted_sender != null && quoted_stanza_id != null) {
Xep.Replies.set_reply_to(new_message, new Xep.Replies.ReplyTo(quoted_sender, quoted_stanza_id));
}
foreach (var fallback in message.get_fallbacks()) {
Xep.FallbackIndication.set_fallback(new_message, fallback);
}
}
}
MessageMarkup.add_spans(new_message, message.get_markups());
string? fallback = get_fallback_body_set_infos(message, new_message, conversation);
new_message.body = fallback == null ? message.body : fallback + message.body;
build_message_stanza(message, new_message, conversation);
pre_message_send(message, new_message, conversation);
@ -477,6 +479,27 @@ public class MessageProcessor : StreamInteractionModule, Object {
}
});
}
public string? get_fallback_body_set_infos(Entities.Message message, MessageStanza new_stanza, Conversation conversation) {
if (message.quoted_item_id == 0) return null;
ContentItem? content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_id(conversation, message.quoted_item_id);
if (content_item == null) return null;
Jid? quoted_sender = stream_interactor.get_module(ContentItemStore.IDENTITY).get_message_sender_for_content_item(conversation, content_item);
string? quoted_stanza_id = stream_interactor.get_module(ContentItemStore.IDENTITY).get_message_id_for_content_item(conversation, content_item);
if (quoted_sender != null && quoted_stanza_id != null) {
Xep.Replies.set_reply_to(new_stanza, new Xep.Replies.ReplyTo(quoted_sender, quoted_stanza_id));
}
string fallback = FallbackBody.get_quoted_fallback_body(content_item);
long fallback_length = fallback.length;
var fallback_location = new Xep.FallbackIndication.FallbackLocation(0, (int)fallback_length);
Xep.FallbackIndication.set_fallback(new_stanza, new Xep.FallbackIndication.Fallback(Xep.Replies.NS_URI, new Xep.FallbackIndication.FallbackLocation[] { fallback_location }));
return fallback;
}
}
public abstract class MessageListener : Xmpp.OrderedListener {
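get_fallback_body_set_infos() above prepends a quote fallback to the outgoing body and advertises the range it occupies via XEP-0428 Fallback Indication. A plain-string sketch of that sender-side idea (build_body_with_fallback() and the "> " quoting style are illustrative, not Dino's FallbackBody API):

string build_body_with_fallback (string quoted, string body, out int fallback_from, out int fallback_to) {
    // conventional quote fallback: prefix every quoted line with "> "
    string fallback = "> " + quoted.replace ("\n", "\n> ") + "\n";
    fallback_from = 0;
    // XEP-0428 offsets count characters; Vala's string.length counts bytes
    fallback_to = fallback.char_count ();
    return fallback + body;
}

int main () {
    int from, to;
    string body = build_body_with_fallback ("Wanna grab lunch?", "Sure, 12:30?", out from, out to);
    print ("%s\n(fallback covers characters %d..%d)\n", body, from, to);
    return 0;
}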

View File

@ -99,14 +99,6 @@ public class MessageStorage : StreamInteractionModule, Object {
return create_message_from_row_opt(row_option, conversation);
}
public Message? get_message_by_referencing_id(string id, Conversation conversation) {
if (conversation.type_ == Conversation.Type.CHAT) {
return stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_stanza_id(id, conversation);
} else {
return stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_server_id(id, conversation);
}
}
public Message? get_message_by_stanza_id(string stanza_id, Conversation conversation) {
if (messages_by_stanza_id.has_key(conversation)) {
Message? message = messages_by_stanza_id[conversation][stanza_id];
@ -199,16 +191,6 @@ public class MessageStorage : StreamInteractionModule, Object {
message_refs.remove_at(message_refs.size - 1);
}
}
public static string? get_reference_id(Message message) {
if (message.edit_to != null) return message.edit_to;
if (message.type_ == Message.Type.CHAT) {
return message.stanza_id;
} else {
return message.server_id;
}
}
}
}

View File

@ -24,7 +24,7 @@ public class ModuleManager {
return null;
}
public ArrayList<XmppStreamModule> get_modules(Account account) {
public ArrayList<XmppStreamModule> get_modules(Account account, string? resource = null) {
ArrayList<XmppStreamModule> modules = new ArrayList<XmppStreamModule>();
lock (module_map) {
@ -34,7 +34,7 @@ public class ModuleManager {
foreach (XmppStreamModule module in module_map[account]) {
if (module.get_id() == Bind.Module.IDENTITY.id) {
((Bind.Module) module).requested_resource = account.resourcepart;
((Bind.Module) module).requested_resource = resource ?? account.resourcepart;
} else if (module.get_id() == Sasl.Module.IDENTITY.id) {
((Sasl.Module) module).password = account.password;
}
@ -59,7 +59,6 @@ public class ModuleManager {
module_map[account].add(new Xmpp.MessageModule());
module_map[account].add(new Xmpp.MessageArchiveManagement.Module());
module_map[account].add(new Xep.MessageCarbons.Module());
module_map[account].add(new Xep.BitsOfBinary.Module());
module_map[account].add(new Xep.Muc.Module());
module_map[account].add(new Xep.Pubsub.Module());
module_map[account].add(new Xep.MessageDeliveryReceipts.Module());

View File

@ -21,7 +21,7 @@ public class MucManager : StreamInteractionModule, Object {
public signal void conference_removed(Account account, Jid jid);
private StreamInteractor stream_interactor;
private HashMap<Account, HashSet<Jid>> mucs_joined = new HashMap<Account, HashSet<Jid>>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashSet<Jid>> mucs_todo = new HashMap<Account, HashSet<Jid>>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashSet<Jid>> mucs_joining = new HashMap<Account, HashSet<Jid>>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashMap<Jid, Cancellable>> mucs_sync_cancellables = new HashMap<Account, HashMap<Jid, Cancellable>>(Account.hash_func, Account.equals_func);
private HashMap<Jid, Xep.Muc.MucEnterError> enter_errors = new HashMap<Jid, Xep.Muc.MucEnterError>(Jid.hash_func, Jid.equals_func);
@ -54,7 +54,6 @@ public class MucManager : StreamInteractionModule, Object {
}
return true;
});
stream_interactor.get_module(MessageProcessor.IDENTITY).build_message_stanza.connect(on_build_message_stanza);
}
// already_autojoin: Without this flag we'd be retrieving bookmarks (to check for autojoin) from the sender on every join
@ -73,7 +72,7 @@ public class MucManager : StreamInteractionModule, Object {
bool receive_history = true;
EntityInfo entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
bool can_do_mam = yield entity_info.has_feature(account, jid, Xmpp.MessageArchiveManagement.NS_URI);
bool can_do_mam = yield entity_info.has_feature(account, jid, Xmpp.MessageArchiveManagement.NS_URI_2);
if (can_do_mam) {
receive_history = false;
history_since = null;
@ -84,6 +83,11 @@ public class MucManager : StreamInteractionModule, Object {
}
mucs_joining[account].add(jid);
if (!mucs_todo.has_key(account)) {
mucs_todo[account] = new HashSet<Jid>(Jid.hash_bare_func, Jid.equals_bare_func);
}
mucs_todo[account].add(jid.with_resource(nick_));
Muc.JoinResult? res = yield stream.get_module(Xep.Muc.Module.IDENTITY).enter(stream, jid.bare_jid, nick_, password, history_since, receive_history, null);
mucs_joining[account].remove(jid);
@ -122,26 +126,20 @@ public class MucManager : StreamInteractionModule, Object {
enter_errors[jid] = res.muc_error;
}
if (!mucs_joined.has_key(account)) {
mucs_joined[account] = new HashSet<Jid>(Jid.hash_bare_func, Jid.equals_bare_func);
}
mucs_joined[account].add(jid.with_resource(res.nick ?? nick_));
return res;
}
public void part(Account account, Jid jid) {
if (mucs_joined.has_key(account) && mucs_joined[account].contains(jid)) {
mucs_joined[account].remove(jid);
}
if (!mucs_todo.has_key(account) || !mucs_todo[account].contains(jid)) return;
mucs_todo[account].remove(jid);
XmppStream? stream = stream_interactor.get_stream(account);
if (stream != null) {
unset_autojoin(account, stream, jid);
stream.get_module(Xep.Muc.Module.IDENTITY).exit(stream, jid.bare_jid);
}
if (stream == null) return;
unset_autojoin(account, stream, jid);
stream.get_module(Xep.Muc.Module.IDENTITY).exit(stream, jid.bare_jid);
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(jid, account, Conversation.Type.GROUPCHAT);
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(jid, account);
if (conversation != null) stream_interactor.get_module(ConversationManager.IDENTITY).close_conversation(conversation);
cancel_sync(account, jid);
@ -183,9 +181,9 @@ public class MucManager : StreamInteractionModule, Object {
conversation.nickname = new_nick;
if (mucs_joined.has_key(conversation.account)) {
mucs_joined[conversation.account].remove(conversation.counterpart);
mucs_joined[conversation.account].add(conversation.counterpart.with_resource(new_nick));
if (mucs_todo.has_key(conversation.account)) {
mucs_todo[conversation.account].remove(conversation.counterpart);
mucs_todo[conversation.account].add(conversation.counterpart.with_resource(new_nick));
}
// Update nick in bookmark
@ -233,8 +231,15 @@ public class MucManager : StreamInteractionModule, Object {
// the term `private room` is shorthand for members-only + non-anonymous rooms
public bool is_private_room(Account account, Jid jid) {
var entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
return entity_info.has_feature_offline(account, jid, "muc_membersonly") && entity_info.has_feature_offline(account, jid, "muc_nonanonymous");
XmppStream? stream = stream_interactor.get_stream(account);
if (stream == null) {
return false;
}
Xep.Muc.Flag? flag = stream.get_flag(Xep.Muc.Flag.IDENTITY);
if (flag == null) {
return false;
}
return flag.has_room_feature(jid, Xep.Muc.Feature.NON_ANONYMOUS) && flag.has_room_feature(jid, Xep.Muc.Feature.MEMBERS_ONLY);
}
public bool is_moderated_room(Account account, Jid jid) {
@ -631,7 +636,8 @@ public class MucManager : StreamInteractionModule, Object {
} else if (conversation.active && !conference.autojoin) {
part(account, conference.jid);
}
} else if (conference.autojoin) {
}
if (conference.autojoin) {
join.begin(account, conference.jid, conference.nick, conference.password);
}
conference_added(account, conference);
@ -645,19 +651,13 @@ public class MucManager : StreamInteractionModule, Object {
conference_removed(account, jid);
}
private void on_build_message_stanza(Entities.Message message, Xmpp.MessageStanza message_stanza, Conversation conversation) {
if (conversation.type_ == Conversation.Type.GROUPCHAT_PM) {
Xmpp.Xep.Muc.add_muc_pm_message_stanza_x_node(message_stanza);
}
}
private void self_ping(Account account) {
XmppStream? stream = stream_interactor.get_stream(account);
if (stream == null) return;
if (!mucs_joined.has_key(account)) return;
if (!mucs_todo.has_key(account)) return;
foreach (Jid jid in mucs_joined[account]) {
foreach (Jid jid in mucs_todo[account]) {
bool joined = false;
@ -666,7 +666,7 @@ public class MucManager : StreamInteractionModule, Object {
});
Timeout.add_seconds(10, () => {
if (joined || !mucs_joined.has_key(account) || stream_interactor.get_stream(account) != stream) return false;
if (joined || !mucs_todo.has_key(account) || stream_interactor.get_stream(account) != stream) return false;
join.begin(account, jid.bare_jid, jid.resourcepart, null, true);
return false;
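Several hunks in this file track joined or to-be-joined rooms in HashSets constructed with Jid.hash_bare_func and Jid.equals_bare_func, so the same room under different nicknames maps to a single entry. A generic Gee sketch of that technique using plain strings (bare(), bare_hash() and bare_equals() are illustrative helpers, not Dino code; compile with valac --pkg gee-0.8):

using Gee;

string bare (string full_jid) {
    int slash = full_jid.index_of ("/");
    return slash == -1 ? full_jid : full_jid[0:slash];
}

uint bare_hash (string jid) { return bare (jid).hash (); }

bool bare_equals (string a, string b) { return bare (a) == bare (b); }

int main () {
    var mucs = new HashSet<string> (bare_hash, bare_equals);
    mucs.add ("room@muc.example.org/alice");
    assert (!mucs.add ("room@muc.example.org/bob"));   // same room, different nick: already present
    assert (mucs.contains ("room@muc.example.org"));
    print ("%d room(s) tracked\n", mucs.size);
    return 0;
}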

View File

@ -64,7 +64,7 @@ public class Dino.Reactions : StreamInteractionModule, Object {
// The MUC server needs to 1) support stable stanza ids and 2) either support occupant ids or be a private room (where we know real JIDs)
var entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
bool server_supports_sid = (entity_info.has_feature_cached(conversation.account, conversation.counterpart.bare_jid, Xep.UniqueStableStanzaIDs.NS_URI)) ||
(entity_info.has_feature_cached(conversation.account, conversation.counterpart.bare_jid, Xmpp.MessageArchiveManagement.NS_URI));
(entity_info.has_feature_cached(conversation.account, conversation.counterpart.bare_jid, Xmpp.MessageArchiveManagement.NS_URI_2));
if (!server_supports_sid) return false;
bool? supports_occupant_ids = entity_info.has_feature_cached(conversation.account, conversation.counterpart, Xep.OccupantIds.NS_URI);
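The comment above states the two requirements for offering reactions in a MUC. Written out as a pure predicate the gate is roughly this (can_react_in_muc() is an illustrative name, not the module's API):

bool can_react_in_muc (bool stable_stanza_ids, bool occupant_ids, bool private_room) {
    if (!stable_stanza_ids) return false;    // requirement 1
    return occupant_ids || private_room;     // requirement 2
}

int main () {
    assert (can_react_in_muc (true, true, false));
    assert (can_react_in_muc (true, false, true));
    assert (!can_react_in_muc (false, true, true));    // no stable ids: no reactions
    assert (!can_react_in_muc (true, false, false));   // anonymous room without occupant ids
    print ("gating matches the comment above\n");
    return 0;
}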

View File

@ -71,12 +71,6 @@ public class Register : StreamInteractionModule, Object{
return ret;
}
public async string? change_password(Account account, string new_pw){
XmppStream stream = stream_interactor.get_stream(account);
if (stream == null) return null;
return (yield stream.get_module(Xep.InBandRegistration.Module.IDENTITY).change_password(stream, account.full_jid, new_pw)).condition;
}
public class ServerAvailabilityReturn {
public bool available { get; set; }
public TlsCertificateFlags? error_flags { get; set; }
@ -141,7 +135,6 @@ public class Register : StreamInteractionModule, Object{
Gee.List<XmppStreamModule> list = new ArrayList<XmppStreamModule>();
list.add(new Iq.Module());
list.add(new Xep.InBandRegistration.Module());
list.add(new Xep.BitsOfBinary.Module());
XmppStreamResult stream_result = yield Xmpp.establish_stream(jid.domain_jid, list, Application.print_xmpp,
(peer_cert, errors) => { return ConnectionManager.on_invalid_certificate(jid.domainpart, peer_cert, errors); }

View File

@ -38,6 +38,17 @@ public class Dino.Replies : StreamInteractionModule, Object {
return null;
}
public void set_message_is_reply_to(Message message, ContentItem reply_to) {
message.quoted_item_id = reply_to.id;
db.reply.upsert()
.value(db.reply.message_id, message.id, true)
.value(db.reply.quoted_content_item_id, reply_to.id)
.value_null(db.reply.quoted_message_stanza_id)
.value_null(db.reply.quoted_message_from)
.perform();
}
private void on_incoming_message(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
// Check if a previous message was in reply to this one
var reply_qry = db.reply.select();
@ -56,7 +67,7 @@ public class Dino.Replies : StreamInteractionModule, Object {
ContentItem? message_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_foreign(conversation, 1, message.id);
Message? reply_message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_id(reply_row[db.message.id], conversation);
if (message_item != null && reply_message != null) {
reply_message.set_quoted_item(message_item.id);
set_message_is_reply_to(reply_message, message_item);
}
}
@ -67,7 +78,7 @@ public class Dino.Replies : StreamInteractionModule, Object {
ContentItem? quoted_content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_content_item_for_message_id(conversation, reply_to.to_message_id);
if (quoted_content_item == null) return;
message.set_quoted_item(quoted_content_item.id);
set_message_is_reply_to(message, quoted_content_item);
}
private class ReceivedMessageListener : MessageListener {
@ -94,8 +105,7 @@ namespace Dino {
string body = message.body;
foreach (var fallback in message.get_fallbacks()) {
if (fallback.ns_uri == Xep.Replies.NS_URI && message.quoted_item_id > 0) {
body = body[0:body.index_of_nth_char(fallback.locations[0].from_char)] +
body[body.index_of_nth_char(fallback.locations[0].to_char):body.length];
body = body[0:fallback.locations[0].from_char] + body[fallback.locations[0].to_char:body.length];
}
}
return body;
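The hunk above changes how the reply fallback is stripped from the body. XEP-0428 locations are character offsets, while Vala string slicing works on byte indices, so the two only coincide for pure ASCII; index_of_nth_char() converts a character offset into a byte offset, mirroring the index_of_nth_char variant in the hunk. A self-contained example (the sample text and offsets are illustrative):

int main () {
    string body = "» Grüße aus Köln\nSee you tomorrow!";
    int from_char = 0;
    int to_char = 17;   // characters of the quoted first line including its newline
    int from_byte = body.index_of_nth_char (from_char);
    int to_byte = body.index_of_nth_char (to_char);
    string stripped = body[0:from_byte] + body[to_byte:body.length];
    print ("%s\n", stripped);   // prints: See you tomorrow!
    return 0;
}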

View File

@ -133,7 +133,6 @@ public class RosterStoreImpl : Roster.Storage, Object {
.value(db.roster.jid, item.jid.to_string(), true)
.value(db.roster.handle, item.name)
.value(db.roster.subscription, item.subscription)
.value(db.roster.ask, item.ask)
.perform();
}

View File

@ -1,80 +0,0 @@
using Gdk;
using GLib;
using Gee;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
namespace Dino {
public interface FileMetadataProvider : Object {
public abstract bool supports_file(File file);
public abstract async void fill_metadata(File file, Xep.FileMetadataElement.FileMetadata metadata);
}
class GenericFileMetadataProvider: Dino.FileMetadataProvider, Object {
public bool supports_file(File file) {
return true;
}
public async void fill_metadata(File file, Xep.FileMetadataElement.FileMetadata metadata) {
FileInfo info = file.query_info("*", FileQueryInfoFlags.NONE);
metadata.name = info.get_display_name();
metadata.mime_type = info.get_content_type();
metadata.size = info.get_size();
metadata.date = info.get_modification_date_time();
var checksum_types = new ArrayList<ChecksumType>.wrap(new ChecksumType[] { ChecksumType.SHA256, ChecksumType.SHA512 });
var file_hashes = yield compute_file_hashes(file, checksum_types);
metadata.hashes.add(new CryptographicHashes.Hash.with_checksum(ChecksumType.SHA256, file_hashes[ChecksumType.SHA256]));
metadata.hashes.add(new CryptographicHashes.Hash.with_checksum(ChecksumType.SHA512, file_hashes[ChecksumType.SHA512]));
}
}
public class ImageFileMetadataProvider: Dino.FileMetadataProvider, Object {
public bool supports_file(File file) {
string mime_type = file.query_info("*", FileQueryInfoFlags.NONE).get_content_type();
return Dino.Util.is_pixbuf_supported_mime_type(mime_type);
}
private const int[] THUMBNAIL_DIMS = { 1, 2, 3, 4, 8 };
private const string IMAGE_TYPE = "png";
private const string MIME_TYPE = "image/png";
public async void fill_metadata(File file, Xep.FileMetadataElement.FileMetadata metadata) {
Pixbuf pixbuf = new Pixbuf.from_stream(yield file.read_async());
metadata.width = pixbuf.get_width();
metadata.height = pixbuf.get_height();
float ratio = (float)metadata.width / (float) metadata.height;
int thumbnail_width = -1;
int thumbnail_height = -1;
float diff = float.INFINITY;
for (int i = 0; i < THUMBNAIL_DIMS.length; i++) {
int test_width = THUMBNAIL_DIMS[i];
int test_height = THUMBNAIL_DIMS[THUMBNAIL_DIMS.length - 1 - i];
float test_ratio = (float)test_width / (float)test_height;
float test_diff = (test_ratio - ratio).abs();
if (test_diff < diff) {
thumbnail_width = test_width;
thumbnail_height = test_height;
diff = test_diff;
}
}
Pixbuf thumbnail_pixbuf = pixbuf.scale_simple(thumbnail_width, thumbnail_height, InterpType.BILINEAR);
uint8[] buffer;
thumbnail_pixbuf.save_to_buffer(out buffer, IMAGE_TYPE);
var thumbnail = new Xep.JingleContentThumbnails.Thumbnail();
thumbnail.data = new Bytes.take(buffer);
thumbnail.media_type = MIME_TYPE;
thumbnail.width = thumbnail_width;
thumbnail.height = thumbnail_height;
metadata.thumbnails.add(thumbnail);
}
}
}
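The ImageFileMetadataProvider above picks the tiny thumbnail geometry by pairing entries of THUMBNAIL_DIMS from both ends ((1,8), (2,4), (3,3), (4,2), (8,1)) and choosing the pair whose ratio is closest to the image's aspect ratio. A self-contained sketch of just that selection arithmetic (pick_dims() is an illustrative wrapper around the same loop):

const int[] THUMBNAIL_DIMS = { 1, 2, 3, 4, 8 };

void pick_dims (int width, int height, out int thumb_w, out int thumb_h) {
    float ratio = (float) width / (float) height;
    thumb_w = -1;
    thumb_h = -1;
    float diff = float.INFINITY;
    for (int i = 0; i < THUMBNAIL_DIMS.length; i++) {
        int test_w = THUMBNAIL_DIMS[i];
        int test_h = THUMBNAIL_DIMS[THUMBNAIL_DIMS.length - 1 - i];
        float test_diff = ((float) test_w / (float) test_h - ratio).abs ();
        if (test_diff < diff) {
            thumb_w = test_w;
            thumb_h = test_h;
            diff = test_diff;
        }
    }
}

int main () {
    int w, h;
    pick_dims (1920, 1080, out w, out h);
    print ("1920x1080 -> %dx%d\n", w, h);   // 4x2: ratio 2.0 is closest to 1.78
    pick_dims (1080, 1920, out w, out h);
    print ("1080x1920 -> %dx%d\n", w, h);   // 2x4
    return 0;
}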

Some files were not shown because too many files have changed in this diff.