Compare commits

..

No commits in common. "master" and "v0.3.0" have entirely different histories.

618 changed files with 46399 additions and 73989 deletions

View File

@ -1,17 +0,0 @@
{
"problemMatcher": [
{
"owner": "gcc-problem-matcher",
"pattern": [
{
"regexp": "^(.*?):(\\d+):(\\d*):?\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}

View File

@ -1,17 +0,0 @@
{
"problemMatcher": [
{
"owner": "meson-problem-matcher",
"pattern": [
{
"regexp": "^(.*?)?:(\\d+)?:(\\d+)?: (WARNING|ERROR):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}

View File

@ -1,17 +0,0 @@
{
"problemMatcher": [
{
"owner": "vala-problem-matcher",
"pattern": [
{
"regexp": "^(?:../)?(.*?):(\\d+).(\\d+)-\\d+.\\d+:?\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5
}
]
}
]
}

View File

@ -2,42 +2,12 @@ name: Build
on: [pull_request, push]
jobs:
build:
name: "Build"
runs-on: ubuntu-24.04
runs-on: ubuntu-20.04
steps:
- name: "Checkout sources"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: "Setup matchers"
run: |
echo '::add-matcher::${{ github.workspace }}/.github/matchers/gcc-problem-matcher.json'
echo '::add-matcher::${{ github.workspace }}/.github/matchers/vala-problem-matcher.json'
echo '::add-matcher::${{ github.workspace }}/.github/matchers/meson-problem-matcher.json'
- name: "Setup dependencies"
run: |
sudo apt-get update
sudo apt-get remove libunwind-14-dev
sudo apt-get install -y build-essential gettext libadwaita-1-dev libcanberra-dev libgcrypt20-dev libgee-0.8-dev libgpgme-dev libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev libgtk-4-dev libnice-dev libnotify-dev libqrencode-dev libsignal-protocol-c-dev libsoup-3.0-dev libsqlite3-dev libsrtp2-dev libwebrtc-audio-processing-dev meson valac
- name: "Configure"
run: meson setup build
- name: "Build"
run: meson compile -C build
- name: "Test"
run: meson test -C build
build-flatpak:
name: "Build flatpak"
runs-on: ubuntu-24.04
container:
image: bilelmoussaoui/flatpak-github-actions:gnome-46
options: --privileged
steps:
- name: "Checkout sources"
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: "Build"
uses: flathub-infra/flatpak-github-actions/flatpak-builder@master
with:
manifest-path: im.dino.Dino.json
bundle: im.dino.Dino.flatpak
- uses: actions/checkout@v2
- run: sudo apt-get update
- run: sudo apt-get install -y build-essential gettext cmake valac libgee-0.8-dev libsqlite3-dev libgtk-3-dev libnotify-dev libgpgme-dev libsoup2.4-dev libgcrypt20-dev libqrencode-dev libgspell-1-dev libnice-dev libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libsrtp2-dev libwebrtc-audio-processing-dev
- run: ./configure --with-tests --with-libsignal-in-tree
- run: make
- run: build/xmpp-vala-test
- run: build/signal-protocol-vala-test

4
.gitmodules vendored Normal file
View File

@ -0,0 +1,4 @@
[submodule "libsignal-protocol-c"]
path = plugins/signal-protocol/libsignal-protocol-c
url = https://github.com/WhisperSystems/libsignal-protocol-c.git
branch = v2.3.3

212
CMakeLists.txt Normal file
View File

@ -0,0 +1,212 @@
# Build entry point for Dino. Version information is computed by
# cmake/ComputeVersion.cmake (from a VERSION file, or from git metadata).
cmake_minimum_required(VERSION 3.3)
list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)
include(ComputeVersion)
if (NOT VERSION_FOUND)
    # Neither a VERSION file nor usable git data: configure without a version.
    project(Dino LANGUAGES C CXX)
elseif (VERSION_IS_RELEASE)
    project(Dino VERSION ${VERSION_FULL} LANGUAGES C CXX)
else ()
    # Snapshot versions (e.g. "0.2~git123...") are not valid for
    # project(VERSION ...), so set PROJECT_VERSION manually instead.
    project(Dino LANGUAGES C CXX)
    set(PROJECT_VERSION ${VERSION_FULL})
endif ()
# Prepare Plugins
#
# Each default plugin gets a DINO_PLUGIN_ENABLED_<name> cache option.
# A value the user supplied as a plain variable is promoted into the cache
# unchanged; otherwise the plugin defaults to enabled ("yes").
# NOTE(review): $CACHE{...} syntax requires CMake >= 3.13, newer than the
# declared minimum of 3.3 -- consider raising cmake_minimum_required.
set(DEFAULT_PLUGINS omemo;openpgp;http-files;ice;rtp)
foreach (plugin ${DEFAULT_PLUGINS})
    if ("$CACHE{DINO_PLUGIN_ENABLED_${plugin}}" STREQUAL "")
        # Fixed: a stray '}' after ${plugin} made this DEFINED check test a
        # nonexistent name ("...omemo}"), so the else-branch never ran.
        if (NOT DEFINED DINO_PLUGIN_ENABLED_${plugin})
            set(DINO_PLUGIN_ENABLED_${plugin} "yes" CACHE BOOL "Enable plugin ${plugin}")
        else ()
            set(DINO_PLUGIN_ENABLED_${plugin} "${DINO_PLUGIN_ENABLED_${plugin}}" CACHE BOOL "Enable plugin ${plugin}" FORCE)
        endif ()
        if (DINO_PLUGIN_ENABLED_${plugin})
            message(STATUS "Enabled plugin: ${plugin}")
        else ()
            message(STATUS "Disabled plugin: ${plugin}")
        endif ()
    endif ()
endforeach ()
# Apply explicit overrides given via -DDISABLED_PLUGINS / -DENABLED_PLUGINS
# (semicolon-separated lists), then collect the resulting plugin set from all
# DINO_PLUGIN_ENABLED_* variables for the status message.
if (DISABLED_PLUGINS)
    foreach (plugin ${DISABLED_PLUGINS})
        set(DINO_PLUGIN_ENABLED_${plugin} "no" CACHE BOOL "Enable plugin ${plugin}" FORCE)
        message(STATUS "Disabled plugin: ${plugin}")
    endforeach ()
endif ()
if (ENABLED_PLUGINS)
    foreach (plugin ${ENABLED_PLUGINS})
        set(DINO_PLUGIN_ENABLED_${plugin} "yes" CACHE BOOL "Enable plugin ${plugin}" FORCE)
        message(STATUS "Enabled plugin: ${plugin}")
    endforeach ()
endif ()
set(PLUGINS "")
get_cmake_property(cache_entries VARIABLES)
foreach (entry ${cache_entries})
    # MATCHES populates CMAKE_MATCH_1 with the plugin name; the second
    # condition dereferences the matched variable to test its value.
    if (entry MATCHES "^DINO_PLUGIN_ENABLED_(.+)$" AND ${entry})
        list(APPEND PLUGINS ${CMAKE_MATCH_1})
    endif ()
endforeach ()
list(SORT PLUGINS)
string(REPLACE ";" ", " PLUGINS_TEXT "${PLUGINS}")
message(STATUS "Configuring Dino ${PROJECT_VERSION} with plugins: ${PLUGINS_TEXT}")
# Prepare install paths
#
# set_path(<var> <default> <desc>): give <var> the value <default> when the
# user did not provide one, and print the resulting path once. The internal
# cache entry _<var>_SET remembers the last value printed, so repeated CMake
# runs stay quiet unless the path actually changed.
macro(set_path what val desc)
    if (NOT ${what})
        # No user-supplied value: drop any stale cache entry, use the default.
        unset(${what} CACHE)
        set(${what} ${val})
    endif ()
    if (NOT "${${what}}" STREQUAL "${_${what}_SET}")
        message(STATUS "${desc}: ${${what}}")
        set(_${what}_SET ${${what}} CACHE INTERNAL ${desc})
    endif()
endmacro(set_path)
# Library directory name: "lib" plus an optional LIB_SUFFIX (e.g. "64").
# The REGEX collapses a doubled "lib" prefix if LIB_SUFFIX itself starts
# with "lib".
string(REGEX REPLACE "^liblib" "lib" LIBDIR_NAME "lib${LIB_SUFFIX}")
set_path(CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}" "Installation directory for architecture-independent files")
set_path(EXEC_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}" "Installation directory for architecture-dependent files")
set_path(SHARE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}/share" "Installation directory for read-only architecture-independent data")
set_path(BIN_INSTALL_DIR "${EXEC_INSTALL_PREFIX}/bin" "Installation directory for user executables")
set_path(DATA_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/dino" "Installation directory for dino-specific data")
set_path(APPDATA_FILE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/metainfo" "Installation directory for .appdata.xml files")
set_path(DESKTOP_FILE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/applications" "Installation directory for .desktop files")
set_path(SERVICE_FILE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/dbus-1/services" "Installation directory for .service files")
set_path(ICON_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/icons" "Installation directory for icons")
set_path(INCLUDE_INSTALL_DIR "${EXEC_INSTALL_PREFIX}/include" "Installation directory for C header files")
set_path(LIB_INSTALL_DIR "${EXEC_INSTALL_PREFIX}/${LIBDIR_NAME}" "Installation directory for object code libraries")
set_path(LOCALE_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/locale" "Installation directory for locale files")
set_path(PLUGIN_INSTALL_DIR "${LIB_INSTALL_DIR}/dino/plugins" "Installation directory for dino plugin object code files")
set_path(VAPI_INSTALL_DIR "${SHARE_INSTALL_PREFIX}/vala/vapi" "Installation directory for Vala API files")
# Pre-bundled destination argument lists for install(TARGETS ...) calls made
# by the subdirectories.
set(TARGET_INSTALL LIBRARY DESTINATION ${LIB_INSTALL_DIR} RUNTIME DESTINATION ${BIN_INSTALL_DIR} PUBLIC_HEADER DESTINATION ${INCLUDE_INSTALL_DIR} ARCHIVE DESTINATION ${LIB_INSTALL_DIR})
set(PLUGIN_INSTALL LIBRARY DESTINATION ${PLUGIN_INSTALL_DIR} RUNTIME DESTINATION ${PLUGIN_INSTALL_DIR})
include(CheckCCompilerFlag)
include(CheckCSourceCompiles)

# AddCFlagIfSupported(<flags-var> <flag>)
# Appends <flag> to the string variable <flags-var> if and only if the C
# compiler accepts it. The probe result is cached under
# COMPILER_SUPPORTS<mangled-flag> so the check runs once per flag.
macro(AddCFlagIfSupported list flag)
    # Mangle the flag into a valid cache-variable suffix.
    string(REGEX REPLACE "[^a-z^A-Z^_^0-9]+" "_" flag_name ${flag})
    check_c_compiler_flag(${flag} COMPILER_SUPPORTS${flag_name})
    if (${COMPILER_SUPPORTS${flag_name}})
        set(${list} "${${list}} ${flag}")
    endif ()
endmacro()

# Force colored diagnostics under Ninja, which pipes compiler output.
# Fixed: quote "${CMAKE_GENERATOR}" -- generator names can contain spaces
# ("Unix Makefiles"), and quoting keeps the comparison a single argument.
if ("${CMAKE_GENERATOR}" STREQUAL "Ninja")
    AddCFlagIfSupported(CMAKE_C_FLAGS -fdiagnostics-color)
endif ()
# Flags for all C files
AddCFlagIfSupported(CMAKE_C_FLAGS -Wall)
AddCFlagIfSupported(CMAKE_C_FLAGS -Wextra)
AddCFlagIfSupported(CMAKE_C_FLAGS -Werror=format-security)
AddCFlagIfSupported(CMAKE_C_FLAGS -Wno-duplicate-decl-specifier)
AddCFlagIfSupported(CMAKE_C_FLAGS -fno-omit-frame-pointer)

# VALA_WARN selects which C-compiler warning groups remain enabled when
# compiling valac-generated C sources (which routinely trigger several of
# them). Default: only "conversion" warnings.
if (NOT VALA_WARN)
    set(VALA_WARN "conversion")
endif ()
set(VALA_WARN "${VALA_WARN}" CACHE STRING "Which warnings to show when invoking C compiler on Vala compiler output")
set_property(CACHE VALA_WARN PROPERTY STRINGS "all;unused;qualifier;conversion;deprecated;format;none")
# Vala generates some unused stuff
if (NOT ("all" IN_LIST VALA_WARN OR "unused" IN_LIST VALA_WARN))
    AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-but-set-variable)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-function)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-label)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-parameter)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-value)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-unused-variable)
endif ()
# Vala's generated code frequently drops const qualifiers.
if (NOT ("all" IN_LIST VALA_WARN OR "qualifier" IN_LIST VALA_WARN))
    AddCFlagIfSupported(VALA_CFLAGS -Wno-discarded-qualifiers)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-discarded-array-qualifiers)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-incompatible-pointer-types-discards-qualifiers)
endif ()
if (NOT ("all" IN_LIST VALA_WARN OR "deprecated" IN_LIST VALA_WARN))
    AddCFlagIfSupported(VALA_CFLAGS -Wno-deprecated-declarations)
endif ()
if (NOT ("all" IN_LIST VALA_WARN OR "format" IN_LIST VALA_WARN))
    AddCFlagIfSupported(VALA_CFLAGS -Wno-missing-braces)
endif ()
if (NOT ("all" IN_LIST VALA_WARN OR "conversion" IN_LIST VALA_WARN))
    AddCFlagIfSupported(VALA_CFLAGS -Wno-int-conversion)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-pointer-sign)
    AddCFlagIfSupported(VALA_CFLAGS -Wno-incompatible-pointer-types)
endif ()
# Large file support: first test whether 64-bit file offsets already work
# with the default flags; if not, test whether defining _FILE_OFFSET_BITS=64
# enables them, and add that define globally when it does.
try_compile(__WITHOUT_FILE_OFFSET_BITS_64 ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_SOURCE_DIR}/cmake/LargeFileOffsets.c COMPILE_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS})
if (NOT __WITHOUT_FILE_OFFSET_BITS_64)
    try_compile(__WITH_FILE_OFFSET_BITS_64 ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_SOURCE_DIR}/cmake/LargeFileOffsets.c COMPILE_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS} -D_FILE_OFFSET_BITS=64)
    if (__WITH_FILE_OFFSET_BITS_64)
        AddCFlagIfSupported(CMAKE_C_FLAGS -D_FILE_OFFSET_BITS=64)
        message(STATUS "Enabled large file support using _FILE_OFFSET_BITS=64")
    else (__WITH_FILE_OFFSET_BITS_64)
        message(STATUS "Large file support not available")
    endif (__WITH_FILE_OFFSET_BITS_64)
    unset(__WITH_FILE_OFFSET_BITS_64)
endif (NOT __WITHOUT_FILE_OFFSET_BITS_64)
unset(__WITHOUT_FILE_OFFSET_BITS_64)
# Opt-in ccache support. NOTE: $ENV{USE_CCACHE} is read at configure time
# only -- re-run CMake after changing the environment variable.
if ($ENV{USE_CCACHE})
    # Configure CCache if available
    find_program(CCACHE_BIN ccache)
    mark_as_advanced(CCACHE_BIN)
    if (CCACHE_BIN)
        message(STATUS "Using ccache")
        set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_BIN})
        set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_BIN})
    else (CCACHE_BIN)
        message(STATUS "USE_CCACHE was set but ccache was not found")
    endif (CCACHE_BIN)
endif ($ENV{USE_CCACHE})

# Emit debug info for both C and Vala unless disabled with -DNO_DEBUG=ON.
if (NOT NO_DEBUG)
    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g")
    set(CMAKE_VALA_FLAGS "${CMAKE_VALA_FLAGS} -g")
endif (NOT NO_DEBUG)
# Place all executables and libraries directly in the build root.
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})

# Minimum dependency versions shared by the Find modules and subprojects.
set(GTK3_GLOBAL_VERSION 3.22)
set(GLib_GLOBAL_VERSION 2.38)
set(ICU_GLOBAL_VERSION 57)

# An empty/false cached VALA_EXECUTABLE would suppress the compiler search,
# so drop it before find_package(Vala) runs.
if (NOT VALA_EXECUTABLE)
    unset(VALA_EXECUTABLE CACHE)
endif ()
find_package(Vala 0.34 REQUIRED)
# Certain valac releases miscompile with per-file fast-vapi; disable it there.
if (VALA_VERSION VERSION_GREATER "0.34.90" AND VALA_VERSION VERSION_LESS "0.36.1" OR # Due to a bug on 0.36.0 (and pre-releases), we need to disable FAST_VAPI
    VALA_VERSION VERSION_EQUAL "0.44.10" OR VALA_VERSION VERSION_EQUAL "0.46.4" OR VALA_VERSION VERSION_EQUAL "0.47.1" OR # See Dino issue #646
    VALA_VERSION VERSION_EQUAL "0.40.21" OR VALA_VERSION VERSION_EQUAL "0.46.8" OR VALA_VERSION VERSION_EQUAL "0.48.4") # See Dino issue #816
    set(DISABLE_FAST_VAPI yes)
endif ()
include(${VALA_USE_FILE})
include(MultiFind)
include(GlibCompileResourcesSupport)
set(CMAKE_VALA_FLAGS "${CMAKE_VALA_FLAGS} --target-glib=${GLib_GLOBAL_VERSION}")

add_subdirectory(qlite)
add_subdirectory(xmpp-vala)
add_subdirectory(libdino)
add_subdirectory(main)
add_subdirectory(crypto-vala)
add_subdirectory(plugins)

# uninstall target
configure_file("${CMAKE_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in" "${CMAKE_BINARY_DIR}/cmake_uninstall.cmake" IMMEDIATE @ONLY)
add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/cmake_uninstall.cmake COMMENT "Uninstall the project...")

View File

@ -1,11 +1,7 @@
<img src="https://dino.im/img/logo.svg" width="80">
![Dino](https://dino.im/img/readme_header.svg)
=======
# Dino
Dino is an XMPP messaging app for Linux using GTK and Vala.
It supports calls, encryption, file transfers, group chats and more.
![screenshot](https://dino.im/img/appdata/screenshot-dino-0.4-main-2244x1644@2.png)
![screenshots](https://dino.im/img/screenshot-main.png)
Installation
------------
@ -15,9 +11,9 @@ Build
-----
Make sure to install all [dependencies](https://github.com/dino/dino/wiki/Build#dependencies).
meson setup build
meson compile -C build
build/main/dino
./configure
make
build/dino
Resources
---------
@ -34,8 +30,8 @@ Contribute
License
-------
Dino - XMPP messaging app using GTK/Vala
Copyright (C) 2016-2025 Dino contributors
Dino - Modern Jabber/XMPP Client using GTK+/Vala
Copyright (C) 2016-2022 Dino contributors
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by

View File

@ -0,0 +1,57 @@
# This file is used to be invoked at build time. It generates the needed
# resource XML file.
# Input variables that need to provided when invoking this script:
# GXML_OUTPUT The output file path where to save the XML file.
# GXML_COMPRESS_ALL Sets all COMPRESS flags in all resources in resource
# list.
# GXML_NO_COMPRESS_ALL Removes all COMPRESS flags in all resources in
# resource list.
# GXML_STRIPBLANKS_ALL Sets all STRIPBLANKS flags in all resources in
# resource list.
# GXML_NO_STRIPBLANKS_ALL Removes all STRIPBLANKS flags in all resources in
# resource list.
# GXML_TOPIXDATA_ALL Sets all TOPIXDATA flags in all resources in resource
# list.
# GXML_NO_TOPIXDATA_ALL Removes all TOPIXDATA flags in all resources in
# resource list.
# GXML_PREFIX Overrides the resource prefix that is prepended to
# each relative name in registered resources.
# GXML_RESOURCES The list of resource files. Whether absolute or
# relative path is equal.
# Include the GENERATE_GXML() function.
include(${CMAKE_CURRENT_LIST_DIR}/GenerateGXML.cmake)

# Normalize the boolean -D flags passed on the command line into the literal
# keyword arguments generate_gxml() expects; an unset flag expands to nothing
# in the call below.
if(GXML_COMPRESS_ALL)
    set(GXML_COMPRESS_ALL COMPRESS_ALL)
endif()
if(GXML_NO_COMPRESS_ALL)
    set(GXML_NO_COMPRESS_ALL NO_COMPRESS_ALL)
endif()
if(GXML_STRIPBLANKS_ALL)
    set(GXML_STRIPBLANKS_ALL STRIPBLANKS_ALL)
endif()
if(GXML_NO_STRIPBLANKS_ALL)
    set(GXML_NO_STRIPBLANKS_ALL NO_STRIPBLANKS_ALL)
endif()
if(GXML_TOPIXDATA_ALL)
    set(GXML_TOPIXDATA_ALL TOPIXDATA_ALL)
endif()
if(GXML_NO_TOPIXDATA_ALL)
    set(GXML_NO_TOPIXDATA_ALL NO_TOPIXDATA_ALL)
endif()

# Replace " " with ";" to import the list over the command line. Otherwise
# CMake would interpret the passed resources as a whole string.
# Fixed: quote the input -- with an unquoted empty GXML_RESOURCES the call
# collapses to three arguments and string(REPLACE) fails with
# "requires at least four arguments".
string(REPLACE " " ";" GXML_RESOURCES "${GXML_RESOURCES}")

# Invoke the gresource XML generation function.
generate_gxml(${GXML_OUTPUT}
    ${GXML_COMPRESS_ALL} ${GXML_NO_COMPRESS_ALL}
    ${GXML_STRIPBLANKS_ALL} ${GXML_NO_STRIPBLANKS_ALL}
    ${GXML_TOPIXDATA_ALL} ${GXML_NO_TOPIXDATA_ALL}
    PREFIX ${GXML_PREFIX}
    RESOURCES ${GXML_RESOURCES})

View File

@ -0,0 +1,221 @@
include(CMakeParseArguments)

# Path to this file.
set(GCR_CMAKE_MACRO_DIR ${CMAKE_CURRENT_LIST_DIR})

# Compiles a gresource resource file from given resource files. Automatically
# creates the XML controlling file.
# The type of resource to generate (header, c-file or bundle) is automatically
# determined from TARGET file ending, if no TYPE is explicitly specified.
# The output file is stored in the provided variable "output".
# "xml_out" contains the variable where to output the XML path. Can be used to
# create custom targets or doing postprocessing.
# If you want to use preprocessing, you need to manually check the existence
# of the tools you use. This function doesn't check this for you, it just
# generates the XML file. glib-compile-resources will then throw a
# warning/error.
function(COMPILE_GRESOURCES output xml_out)
    # Available options:
    # COMPRESS_ALL, NO_COMPRESS_ALL       Overrides the COMPRESS flag in all
    #                                     registered resources.
    # STRIPBLANKS_ALL, NO_STRIPBLANKS_ALL Overrides the STRIPBLANKS flag in all
    #                                     registered resources.
    # TOPIXDATA_ALL, NO_TOPIXDATA_ALL     Overrides the TOPIXDATA flag in all
    #                                     registered resources.
    set(CG_OPTIONS COMPRESS_ALL NO_COMPRESS_ALL
                   STRIPBLANKS_ALL NO_STRIPBLANKS_ALL
                   TOPIXDATA_ALL NO_TOPIXDATA_ALL)

    # Available one value options:
    # TYPE       Type of resource to create. Valid options are:
    #            EMBED_C: A C-file that can be compiled with your project.
    #            EMBED_H: A header that can be included into your project.
    #            BUNDLE:  Generates a resource bundle file that can be loaded
    #                     at runtime.
    #            AUTO:    Determine from target file ending. Need to specify
    #                     target argument.
    # PREFIX     Overrides the resource prefix that is prepended to each
    #            relative file name in registered resources.
    # SOURCE_DIR Overrides the resources base directory to search for resources.
    #            Normally this is set to the source directory with that CMake
    #            was invoked (CMAKE_SOURCE_DIR).
    # TARGET     Overrides the name of the output file/-s. Normally the output
    #            names from glib-compile-resources tool is taken.
    set(CG_ONEVALUEARGS TYPE PREFIX SOURCE_DIR TARGET)

    # Available multi-value options:
    # RESOURCES The list of resource files. Whether absolute or relative path is
    #           equal, absolute paths are stripped down to relative ones. If the
    #           absolute path is not inside the given base directory SOURCE_DIR
    #           or CMAKE_SOURCE_DIR (if SOURCE_DIR is not overriden), this
    #           function aborts.
    # OPTIONS   Extra command line options passed to glib-compile-resources.
    set(CG_MULTIVALUEARGS RESOURCES OPTIONS)

    # Parse the arguments.
    cmake_parse_arguments(CG_ARG
                          "${CG_OPTIONS}"
                          "${CG_ONEVALUEARGS}"
                          "${CG_MULTIVALUEARGS}"
                          "${ARGN}")

    # Variable to store the double-quote (") string. Since escaping
    # double-quotes in strings is not possible we need a helper variable that
    # does this job for us.
    set(Q \")

    # Check invocation validity with the <prefix>_UNPARSED_ARGUMENTS variable.
    # If other not recognized parameters were passed, throw error.
    if (CG_ARG_UNPARSED_ARGUMENTS)
        set(CG_WARNMSG "Invocation of COMPILE_GRESOURCES with unrecognized")
        set(CG_WARNMSG "${CG_WARNMSG} parameters. Parameters are:")
        set(CG_WARNMSG "${CG_WARNMSG} ${CG_ARG_UNPARSED_ARGUMENTS}.")
        message(WARNING ${CG_WARNMSG})
    endif()

    # Check invocation validity depending on generation mode (EMBED_C, EMBED_H
    # or BUNDLE).
    if ("${CG_ARG_TYPE}" STREQUAL "EMBED_C")
        # EMBED_C mode, output compilable C-file.
        set(CG_GENERATE_COMMAND_LINE "--generate-source")
        set(CG_TARGET_FILE_ENDING "c")
    elseif ("${CG_ARG_TYPE}" STREQUAL "EMBED_H")
        # EMBED_H mode, output includable header file.
        set(CG_GENERATE_COMMAND_LINE "--generate-header")
        set(CG_TARGET_FILE_ENDING "h")
    elseif ("${CG_ARG_TYPE}" STREQUAL "BUNDLE")
        # BUNDLE mode, output resource bundle. Don't do anything since
        # glib-compile-resources outputs a bundle when not specifying
        # something else.
        set(CG_TARGET_FILE_ENDING "gresource")
    else()
        # Everything else is AUTO mode, determine from target file ending.
        if (CG_ARG_TARGET)
            set(CG_GENERATE_COMMAND_LINE "--generate")
        else()
            set(CG_ERRMSG "AUTO mode given, but no target specified. Can't")
            set(CG_ERRMSG "${CG_ERRMSG} determine output type. In function")
            set(CG_ERRMSG "${CG_ERRMSG} COMPILE_GRESOURCES.")
            message(FATAL_ERROR ${CG_ERRMSG})
        endif()
    endif()

    # Check flag validity.
    if (CG_ARG_COMPRESS_ALL AND CG_ARG_NO_COMPRESS_ALL)
        set(CG_ERRMSG "COMPRESS_ALL and NO_COMPRESS_ALL simultaneously set. In")
        set(CG_ERRMSG "${CG_ERRMSG} function COMPILE_GRESOURCES.")
        message(FATAL_ERROR ${CG_ERRMSG})
    endif()
    if (CG_ARG_STRIPBLANKS_ALL AND CG_ARG_NO_STRIPBLANKS_ALL)
        set(CG_ERRMSG "STRIPBLANKS_ALL and NO_STRIPBLANKS_ALL simultaneously")
        set(CG_ERRMSG "${CG_ERRMSG} set. In function COMPILE_GRESOURCES.")
        message(FATAL_ERROR ${CG_ERRMSG})
    endif()
    if (CG_ARG_TOPIXDATA_ALL AND CG_ARG_NO_TOPIXDATA_ALL)
        set(CG_ERRMSG "TOPIXDATA_ALL and NO_TOPIXDATA_ALL simultaneously set.")
        set(CG_ERRMSG "${CG_ERRMSG} In function COMPILE_GRESOURCES.")
        message(FATAL_ERROR ${CG_ERRMSG})
    endif()

    # Check if there are any resources.
    if (NOT CG_ARG_RESOURCES)
        set(CG_ERRMSG "No resource files to process. In function")
        set(CG_ERRMSG "${CG_ERRMSG} COMPILE_GRESOURCES.")
        message(FATAL_ERROR ${CG_ERRMSG})
    endif()

    # Extract all dependencies for targets from resource list. The flag
    # keywords (COMPRESS/STRIPBLANKS/TOPIXDATA) are interleaved with file
    # names in RESOURCES and must be skipped.
    foreach(res ${CG_ARG_RESOURCES})
        if (NOT(("${res}" STREQUAL "COMPRESS") OR
                ("${res}" STREQUAL "STRIPBLANKS") OR
                ("${res}" STREQUAL "TOPIXDATA")))
            add_custom_command(
                OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/resources/${res}"
                COMMAND ${CMAKE_COMMAND} -E copy "${CG_ARG_SOURCE_DIR}/${res}" "${CMAKE_CURRENT_BINARY_DIR}/resources/${res}"
                MAIN_DEPENDENCY "${CG_ARG_SOURCE_DIR}/${res}")
            list(APPEND CG_RESOURCES_DEPENDENCIES "${CMAKE_CURRENT_BINARY_DIR}/resources/${res}")
        endif()
    endforeach()

    # Construct .gresource.xml path.
    set(CG_XML_FILE_PATH "${CMAKE_CURRENT_BINARY_DIR}/resources/.gresource.xml")

    # Generate gresources XML target.
    list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
    list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_OUTPUT=${Q}${CG_XML_FILE_PATH}${Q}")
    if(CG_ARG_COMPRESS_ALL)
        list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
        list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_COMPRESS_ALL")
    endif()
    if(CG_ARG_NO_COMPRESS_ALL)
        list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
        list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_NO_COMPRESS_ALL")
    endif()
    # Fixed: this previously tested the misspelled CG_ARG_STRPIBLANKS_ALL,
    # so STRIPBLANKS_ALL was never forwarded to the generation script.
    if(CG_ARG_STRIPBLANKS_ALL)
        list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
        list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_STRIPBLANKS_ALL")
    endif()
    if(CG_ARG_NO_STRIPBLANKS_ALL)
        list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
        list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_NO_STRIPBLANKS_ALL")
    endif()
    if(CG_ARG_TOPIXDATA_ALL)
        list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
        list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_TOPIXDATA_ALL")
    endif()
    if(CG_ARG_NO_TOPIXDATA_ALL)
        list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
        list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_NO_TOPIXDATA_ALL")
    endif()
    list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
    list(APPEND CG_CMAKE_SCRIPT_ARGS "GXML_PREFIX=${Q}${CG_ARG_PREFIX}${Q}")
    list(APPEND CG_CMAKE_SCRIPT_ARGS "-D")
    list(APPEND CG_CMAKE_SCRIPT_ARGS
         "GXML_RESOURCES=${Q}${CG_ARG_RESOURCES}${Q}")
    list(APPEND CG_CMAKE_SCRIPT_ARGS "-P")
    list(APPEND CG_CMAKE_SCRIPT_ARGS
         "${Q}${GCR_CMAKE_MACRO_DIR}/BuildTargetScript.cmake${Q}")

    get_filename_component(CG_XML_FILE_PATH_ONLY_NAME
                           "${CG_XML_FILE_PATH}" NAME)
    set(CG_XML_CUSTOM_COMMAND_COMMENT
        "Creating gresources XML file (${CG_XML_FILE_PATH_ONLY_NAME})")
    add_custom_command(OUTPUT ${CG_XML_FILE_PATH}
                       COMMAND ${CMAKE_COMMAND}
                       ARGS ${CG_CMAKE_SCRIPT_ARGS}
                       DEPENDS ${CG_RESOURCES_DEPENDENCIES}
                       WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
                       COMMENT ${CG_XML_CUSTOM_COMMAND_COMMENT})

    # Create target manually if not set (to make sure glib-compile-resources
    # doesn't change behaviour with it's naming standards).
    if (NOT CG_ARG_TARGET)
        set(CG_ARG_TARGET "${CMAKE_CURRENT_BINARY_DIR}/resources")
        set(CG_ARG_TARGET "${CG_ARG_TARGET}.${CG_TARGET_FILE_ENDING}")
    endif()

    # Create source directory automatically if not set.
    if (NOT CG_ARG_SOURCE_DIR)
        set(CG_ARG_SOURCE_DIR "${CMAKE_SOURCE_DIR}")
    endif()

    # Add compilation target for resources.
    # Fixed: pass the parsed OPTIONS argument (was the never-set ${OPTIONS}),
    # and use CMAKE_CURRENT_BINARY_DIR (CMAKE_BUILD_DIR is not a CMake
    # variable), matching the XML command above.
    add_custom_command(OUTPUT ${CG_ARG_TARGET}
                       COMMAND ${GLIB_COMPILE_RESOURCES_EXECUTABLE}
                       ARGS
                           ${CG_ARG_OPTIONS}
                           "--target=${Q}${CG_ARG_TARGET}${Q}"
                           "--sourcedir=${Q}${CG_ARG_SOURCE_DIR}${Q}"
                           ${CG_GENERATE_COMMAND_LINE}
                           ${CG_XML_FILE_PATH}
                       MAIN_DEPENDENCY ${CG_XML_FILE_PATH}
                       DEPENDS ${CG_RESOURCES_DEPENDENCIES}
                       WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})

    # Set output and XML_OUT to parent scope.
    set(${xml_out} ${CG_XML_FILE_PATH} PARENT_SCOPE)
    set(${output} ${CG_ARG_TARGET} PARENT_SCOPE)
endfunction()

105
cmake/ComputeVersion.cmake Normal file
View File

@ -0,0 +1,105 @@
include(CMakeParseArguments)
function(_compute_version_from_file)
    # Derive the version from a VERSION file in the source root, if present.
    # The file contains either "RELEASE <x.y.z>" or "PRERELEASE <version>".
    # Outputs (parent scope): VERSION_FOUND (0/1); when found also
    # VERSION_FULL and VERSION_IS_RELEASE.
    # Re-run the configure step whenever the VERSION file changes.
    set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${CMAKE_SOURCE_DIR}/VERSION)
    if (NOT EXISTS ${CMAKE_SOURCE_DIR}/VERSION)
        set(VERSION_FOUND 0 PARENT_SCOPE)
        return()
    endif ()
    file(STRINGS ${CMAKE_SOURCE_DIR}/VERSION VERSION_FILE)
    # Split on spaces so cmake_parse_arguments sees individual tokens
    # ("RELEASE" / "PRERELEASE" followed by the version string).
    string(REPLACE " " ";" VERSION_FILE "${VERSION_FILE}")
    cmake_parse_arguments(VERSION_FILE "" "RELEASE;PRERELEASE" "" ${VERSION_FILE})
    if (DEFINED VERSION_FILE_RELEASE)
        string(STRIP "${VERSION_FILE_RELEASE}" VERSION_FILE_RELEASE)
        set(VERSION_IS_RELEASE 1 PARENT_SCOPE)
        set(VERSION_FULL "${VERSION_FILE_RELEASE}" PARENT_SCOPE)
        set(VERSION_FOUND 1 PARENT_SCOPE)
    elseif (DEFINED VERSION_FILE_PRERELEASE)
        string(STRIP "${VERSION_FILE_PRERELEASE}" VERSION_FILE_PRERELEASE)
        set(VERSION_IS_RELEASE 0 PARENT_SCOPE)
        set(VERSION_FULL "${VERSION_FILE_PRERELEASE}" PARENT_SCOPE)
        set(VERSION_FOUND 1 PARENT_SCOPE)
    else ()
        # Neither keyword present: the file is malformed; report not found.
        set(VERSION_FOUND 0 PARENT_SCOPE)
    endif ()
endfunction(_compute_version_from_file)
function(_compute_version_from_git)
    # Derive the version from git metadata: the latest reachable tag plus,
    # for snapshots, a "~git<offset>.<date>.<hash>" suffix.
    # Outputs (parent scope): VERSION_FOUND, VERSION_FULL, VERSION_IS_RELEASE.
    set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${CMAKE_SOURCE_DIR}/.git)
    if (NOT GIT_EXECUTABLE)
        find_package(Git QUIET)
        if (NOT GIT_FOUND)
            return()
        endif ()
    endif ()

    # Most recent tag reachable from HEAD.
    execute_process(
        COMMAND "${GIT_EXECUTABLE}" describe --tags --abbrev=0
        WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
        RESULT_VARIABLE status
        OUTPUT_VARIABLE tag
        ERROR_VARIABLE git_stderr
        OUTPUT_STRIP_TRAILING_WHITESPACE
        ERROR_STRIP_TRAILING_WHITESPACE
    )
    if (NOT status EQUAL 0)
        return()
    endif ()
    # Accept "1", "1.2", "1.2.3", optionally prefixed with "v" and carrying
    # semver-style pre-release/build suffixes; keep only the numeric core.
    if (tag MATCHES "^v?([0-9]+[.]?[0-9]*[.]?[0-9]*)(-[.0-9A-Za-z-]+)?([+][.0-9A-Za-z-]+)?$")
        set(VERSION_LAST_RELEASE "${CMAKE_MATCH_1}")
    else ()
        return()
    endif ()

    # Full describe output: identical to the tag exactly when HEAD is tagged.
    execute_process(
        COMMAND "${GIT_EXECUTABLE}" describe --tags
        WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
        RESULT_VARIABLE status
        OUTPUT_VARIABLE described
        ERROR_VARIABLE git_stderr
        OUTPUT_STRIP_TRAILING_WHITESPACE
        ERROR_STRIP_TRAILING_WHITESPACE
    )
    if (NOT status EQUAL 0)
        return()
    endif ()
    if ("${tag}" STREQUAL "${described}")
        set(VERSION_IS_RELEASE 1)
    else ()
        set(VERSION_IS_RELEASE 0)
        # "<tag>-<commits-since>-g<hash>" -> offset and abbreviated hash.
        if (described MATCHES "-([0-9]+)-g([0-9a-f]+)$")
            set(VERSION_TAG_OFFSET "${CMAKE_MATCH_1}")
            set(VERSION_COMMIT_HASH "${CMAKE_MATCH_2}")
        endif ()
        # Commit date of HEAD as YYYYMMDD for the snapshot suffix.
        execute_process(
            COMMAND "${GIT_EXECUTABLE}" show --format=%cd --date=format:%Y%m%d -s
            WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
            RESULT_VARIABLE status
            OUTPUT_VARIABLE commit_date
            ERROR_VARIABLE git_stderr
            OUTPUT_STRIP_TRAILING_WHITESPACE
            ERROR_STRIP_TRAILING_WHITESPACE
        )
        if (NOT status EQUAL 0)
            return()
        endif ()
        set(VERSION_COMMIT_DATE "${commit_date}")
    endif ()
    if (VERSION_IS_RELEASE)
        set(VERSION_SUFFIX "")
    else ()
        set(VERSION_SUFFIX "~git${VERSION_TAG_OFFSET}.${VERSION_COMMIT_DATE}.${VERSION_COMMIT_HASH}")
    endif ()
    set(VERSION_IS_RELEASE ${VERSION_IS_RELEASE} PARENT_SCOPE)
    set(VERSION_FULL "${VERSION_LAST_RELEASE}${VERSION_SUFFIX}" PARENT_SCOPE)
    set(VERSION_FOUND 1 PARENT_SCOPE)
endfunction()
# Prefer the VERSION file; fall back to querying git when it is absent or
# does not yield a version.
_compute_version_from_file()
if (NOT VERSION_FOUND)
    _compute_version_from_git()
endif (NOT VERSION_FOUND)

31
cmake/FindATK.cmake Normal file
View File

@ -0,0 +1,31 @@
include(PkgConfigWithFallback)

# Locate ATK via pkg-config, with a manual header/library search as fallback.
find_pkg_config_with_fallback(ATK
    PKG_CONFIG_NAME atk
    LIB_NAMES atk-1.0
    INCLUDE_NAMES atk/atk.h
    INCLUDE_DIR_SUFFIXES atk-1.0 atk-1.0/include
    DEPENDS GObject
)

# When pkg-config did not report a version, parse it out of atkversion.h.
if(ATK_FOUND AND NOT ATK_VERSION)
    find_file(ATK_VERSION_HEADER "atk/atkversion.h" HINTS ${ATK_INCLUDE_DIRS})
    mark_as_advanced(ATK_VERSION_HEADER)
    if(ATK_VERSION_HEADER)
        foreach(level MAJOR MINOR MICRO)
            file(STRINGS "${ATK_VERSION_HEADER}" ATK_${level}_VERSION REGEX "^#define ATK_${level}_VERSION +\\(?([0-9]+)\\)?$")
            string(REGEX REPLACE "^#define ATK_${level}_VERSION \\(?([0-9]+)\\)?$" "\\1" ATK_${level}_VERSION "${ATK_${level}_VERSION}")
        endforeach()
        set(ATK_VERSION "${ATK_MAJOR_VERSION}.${ATK_MINOR_VERSION}.${ATK_MICRO_VERSION}")
        foreach(level MAJOR MINOR MICRO)
            unset(ATK_${level}_VERSION)
        endforeach()
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(ATK
    REQUIRED_VARS ATK_LIBRARY
    VERSION_VAR ATK_VERSION)

30
cmake/FindCairo.cmake Normal file
View File

@ -0,0 +1,30 @@
include(PkgConfigWithFallback)

# Locate cairo via pkg-config, with a manual header/library search fallback.
find_pkg_config_with_fallback(Cairo
    PKG_CONFIG_NAME cairo
    LIB_NAMES cairo
    INCLUDE_NAMES cairo.h
    INCLUDE_DIR_SUFFIXES cairo cairo/include
)

# When pkg-config did not report a version, parse it out of cairo-version.h.
if(Cairo_FOUND AND NOT Cairo_VERSION)
    find_file(Cairo_VERSION_HEADER "cairo-version.h" HINTS ${Cairo_INCLUDE_DIRS})
    mark_as_advanced(Cairo_VERSION_HEADER)
    if(Cairo_VERSION_HEADER)
        foreach(level MAJOR MINOR MICRO)
            file(STRINGS "${Cairo_VERSION_HEADER}" Cairo_${level}_VERSION REGEX "^#define CAIRO_VERSION_${level} +\\(?([0-9]+)\\)?$")
            string(REGEX REPLACE "^#define CAIRO_VERSION_${level} \\(?([0-9]+)\\)?$" "\\1" Cairo_${level}_VERSION "${Cairo_${level}_VERSION}")
        endforeach()
        set(Cairo_VERSION "${Cairo_MAJOR_VERSION}.${Cairo_MINOR_VERSION}.${Cairo_MICRO_VERSION}")
        foreach(level MAJOR MINOR MICRO)
            unset(Cairo_${level}_VERSION)
        endforeach()
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Cairo
    REQUIRED_VARS Cairo_LIBRARY
    VERSION_VAR Cairo_VERSION)

10
cmake/FindCanberra.cmake Normal file
View File

@ -0,0 +1,10 @@
include(PkgConfigWithFallback)
# Locate libcanberra via pkg-config, with a manual header/library search
# as fallback.
find_pkg_config_with_fallback(Canberra
    PKG_CONFIG_NAME libcanberra
    LIB_NAMES canberra
    INCLUDE_NAMES canberra.h
)
include(FindPackageHandleStandardArgs)
# No VERSION_VAR: this module does not determine a version.
find_package_handle_standard_args(Canberra
    REQUIRED_VARS Canberra_LIBRARY)

10
cmake/FindGCrypt.cmake Normal file
View File

@ -0,0 +1,10 @@
include(PkgConfigWithFallbackOnConfigScript)
# Locate libgcrypt. This helper falls back to the libgcrypt-config script
# when no pkg-config file is available.
find_pkg_config_with_fallback_on_config_script(GCrypt
    PKG_CONFIG_NAME libgcrypt
    CONFIG_SCRIPT_NAME libgcrypt
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GCrypt
    REQUIRED_VARS GCrypt_LIBRARY
    VERSION_VAR GCrypt_VERSION)

38
cmake/FindGDK3.cmake Normal file
View File

@ -0,0 +1,38 @@
include(PkgConfigWithFallback)

# Locate GDK 3 via pkg-config, with a manual header/library search fallback.
find_pkg_config_with_fallback(GDK3
    PKG_CONFIG_NAME gdk-3.0
    LIB_NAMES gdk-3
    INCLUDE_NAMES gdk/gdk.h
    INCLUDE_DIR_SUFFIXES gtk-3.0 gtk-3.0/include gtk+-3.0 gtk+-3.0/include
    DEPENDS Pango Cairo GDKPixbuf2
)

# When pkg-config did not report a version, parse gdkversionmacros.h.
if(GDK3_FOUND AND NOT GDK3_VERSION)
    find_file(GDK3_VERSION_HEADER "gdk/gdkversionmacros.h" HINTS ${GDK3_INCLUDE_DIRS})
    mark_as_advanced(GDK3_VERSION_HEADER)
    if(GDK3_VERSION_HEADER)
        foreach(level MAJOR MINOR MICRO)
            file(STRINGS "${GDK3_VERSION_HEADER}" GDK3_${level}_VERSION REGEX "^#define GDK_${level}_VERSION +\\(?([0-9]+)\\)?$")
            string(REGEX REPLACE "^#define GDK_${level}_VERSION \\(?([0-9]+)\\)?$" "\\1" GDK3_${level}_VERSION "${GDK3_${level}_VERSION}")
        endforeach()
        set(GDK3_VERSION "${GDK3_MAJOR_VERSION}.${GDK3_MINOR_VERSION}.${GDK3_MICRO_VERSION}")
        foreach(level MAJOR MINOR MICRO)
            unset(GDK3_${level}_VERSION)
        endforeach()
    endif()
endif()

# Record whether this GDK build ships the X11 backend header.
if (GDK3_FOUND)
    find_file(GDK3_WITH_X11 "gdk/gdkx.h" HINTS ${GDK3_INCLUDE_DIRS})
    if (GDK3_WITH_X11)
        set(GDK3_WITH_X11 yes CACHE INTERNAL "Does GDK3 support X11")
    endif ()
endif ()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GDK3
    REQUIRED_VARS GDK3_LIBRARY
    VERSION_VAR GDK3_VERSION)

View File

@ -0,0 +1,23 @@
# FindGDKPixbuf2.cmake
# Locates gdk-pixbuf 2 via pkg-config ("gdk-pixbuf-2.0") with a direct
# library/header probe as fallback. Exports GDKPixbuf2_FOUND,
# GDKPixbuf2_LIBRARY/LIBRARIES, GDKPixbuf2_INCLUDE_DIRS, GDKPixbuf2_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GDKPixbuf2
    PKG_CONFIG_NAME gdk-pixbuf-2.0
    LIB_NAMES gdk_pixbuf-2.0
    INCLUDE_NAMES gdk-pixbuf/gdk-pixbuf.h
    INCLUDE_DIR_SUFFIXES gdk-pixbuf-2.0 gdk-pixbuf-2.0/include
    DEPENDS GLib
)

# pkg-config reports the version itself; in the fallback case read the
# GDK_PIXBUF_VERSION string literal from gdk-pixbuf-features.h.
if(GDKPixbuf2_FOUND AND NOT GDKPixbuf2_VERSION)
    find_file(GDKPixbuf2_FEATURES_HEADER "gdk-pixbuf/gdk-pixbuf-features.h" HINTS ${GDKPixbuf2_INCLUDE_DIRS})
    mark_as_advanced(GDKPixbuf2_FEATURES_HEADER)
    if(GDKPixbuf2_FEATURES_HEADER)
        file(STRINGS "${GDKPixbuf2_FEATURES_HEADER}" GDKPixbuf2_VERSION REGEX "^#define GDK_PIXBUF_VERSION \\\"[^\\\"]+\\\"")
        string(REGEX REPLACE "^#define GDK_PIXBUF_VERSION \\\"([0-9]+)\\.([0-9]+)\\.([0-9]+)\\\"$" "\\1.\\2.\\3" GDKPixbuf2_VERSION "${GDKPixbuf2_VERSION}")
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GDKPixbuf2
    REQUIRED_VARS GDKPixbuf2_LIBRARY
    VERSION_VAR GDKPixbuf2_VERSION)

18
cmake/FindGIO.cmake Normal file
View File

@ -0,0 +1,18 @@
# FindGIO.cmake
# Locates GIO via pkg-config ("gio-2.0") with a direct probe as fallback.
# Exports GIO_FOUND, GIO_LIBRARY/LIBRARIES, GIO_INCLUDE_DIRS, GIO_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GIO
    PKG_CONFIG_NAME gio-2.0
    LIB_NAMES gio-2.0
    INCLUDE_NAMES gio/gio.h
    INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
    DEPENDS GObject
)

# GIO ships as part of GLib, so when the fallback path could not determine a
# version, reuse GLib's (honoring any project-wide version requirement).
if(GIO_FOUND AND NOT GIO_VERSION)
    find_package(GLib ${GLib_GLOBAL_VERSION})
    set(GIO_VERSION ${GLib_VERSION})
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GIO
    REQUIRED_VARS GIO_LIBRARY
    VERSION_VAR GIO_VERSION)

32
cmake/FindGLib.cmake Normal file
View File

@ -0,0 +1,32 @@
# FindGLib.cmake
# Locates GLib 2 via pkg-config ("glib-2.0") with a direct probe as fallback.
# glibconfig.h lives under lib*/glib-2.0/include, hence the extra hint/path
# arguments. Exports GLib_FOUND, GLib_LIBRARY/LIBRARIES, GLib_INCLUDE_DIRS,
# GLib_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GLib
    PKG_CONFIG_NAME glib-2.0
    LIB_NAMES glib-2.0
    INCLUDE_NAMES glib.h glibconfig.h
    INCLUDE_DIR_HINTS ${CMAKE_LIBRARY_PATH} ${CMAKE_SYSTEM_LIBRARY_PATH}
    INCLUDE_DIR_PATHS ${CMAKE_PREFIX_PATH}/lib64 ${CMAKE_PREFIX_PATH}/lib
    INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
)

# pkg-config reports the version itself; in the fallback case parse the
# GLIB_{MAJOR,MINOR,MICRO}_VERSION macros out of glibconfig.h.
if(GLib_FOUND AND NOT GLib_VERSION)
    find_file(GLib_CONFIG_HEADER "glibconfig.h" HINTS ${GLib_INCLUDE_DIRS})
    mark_as_advanced(GLib_CONFIG_HEADER)
    if(GLib_CONFIG_HEADER)
        # Fix: the REPLACE patterns accept one-or-more spaces (" +"), matching
        # the STRINGS patterns above; previously they required exactly one
        # space, so a reformatted header would leave the raw #define line in
        # the version variable.
        file(STRINGS "${GLib_CONFIG_HEADER}" GLib_MAJOR_VERSION REGEX "^#define GLIB_MAJOR_VERSION +([0-9]+)")
        string(REGEX REPLACE "^#define GLIB_MAJOR_VERSION +([0-9]+)$" "\\1" GLib_MAJOR_VERSION "${GLib_MAJOR_VERSION}")
        file(STRINGS "${GLib_CONFIG_HEADER}" GLib_MINOR_VERSION REGEX "^#define GLIB_MINOR_VERSION +([0-9]+)")
        string(REGEX REPLACE "^#define GLIB_MINOR_VERSION +([0-9]+)$" "\\1" GLib_MINOR_VERSION "${GLib_MINOR_VERSION}")
        file(STRINGS "${GLib_CONFIG_HEADER}" GLib_MICRO_VERSION REGEX "^#define GLIB_MICRO_VERSION +([0-9]+)")
        string(REGEX REPLACE "^#define GLIB_MICRO_VERSION +([0-9]+)$" "\\1" GLib_MICRO_VERSION "${GLib_MICRO_VERSION}")
        set(GLib_VERSION "${GLib_MAJOR_VERSION}.${GLib_MINOR_VERSION}.${GLib_MICRO_VERSION}")
        unset(GLib_MAJOR_VERSION)
        unset(GLib_MINOR_VERSION)
        unset(GLib_MICRO_VERSION)
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLib
    REQUIRED_VARS GLib_LIBRARY
    VERSION_VAR GLib_VERSION)

19
cmake/FindGModule.cmake Normal file
View File

@ -0,0 +1,19 @@
# FindGModule.cmake
# Locates GModule via pkg-config ("gmodule-2.0") with a direct probe as
# fallback. Exports GModule_FOUND, GModule_LIBRARY/LIBRARIES,
# GModule_INCLUDE_DIRS, GModule_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GModule
    PKG_CONFIG_NAME gmodule-2.0
    LIB_NAMES gmodule-2.0
    INCLUDE_NAMES gmodule.h
    INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
    DEPENDS GLib
)

# GModule ships as part of GLib, so when the fallback path could not
# determine a version, reuse GLib's.
# TODO: parse a GModule-specific version instead of borrowing GLib's.
if(GModule_FOUND AND NOT GModule_VERSION)
    find_package(GLib ${GLib_GLOBAL_VERSION})
    set(GModule_VERSION ${GLib_VERSION})
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GModule
    REQUIRED_VARS GModule_LIBRARY
    VERSION_VAR GModule_VERSION)

19
cmake/FindGObject.cmake Normal file
View File

@ -0,0 +1,19 @@
# FindGObject.cmake
# Locates GObject via pkg-config ("gobject-2.0") with a direct probe as
# fallback. Exports GObject_FOUND, GObject_LIBRARY/LIBRARIES,
# GObject_INCLUDE_DIRS, GObject_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GObject
    PKG_CONFIG_NAME gobject-2.0
    LIB_NAMES gobject-2.0
    INCLUDE_NAMES gobject/gobject.h
    INCLUDE_DIR_SUFFIXES glib-2.0 glib-2.0/include
    DEPENDS GLib
)

# GObject ships as part of GLib, so when the fallback path could not
# determine a version, reuse GLib's.
# TODO: parse a GObject-specific version instead of borrowing GLib's.
if(GObject_FOUND AND NOT GObject_VERSION)
    find_package(GLib ${GLib_GLOBAL_VERSION})
    set(GObject_VERSION ${GLib_VERSION})
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GObject
    REQUIRED_VARS GObject_LIBRARY
    VERSION_VAR GObject_VERSION)

10
cmake/FindGPGME.cmake Normal file
View File

@ -0,0 +1,10 @@
# FindGPGME.cmake
# Locates GPGME: tries pkg-config ("gpgme") first and falls back to the
# legacy gpgme-config script (see PkgConfigWithFallbackOnConfigScript).
# On success sets GPGME_FOUND, GPGME_LIBRARY, GPGME_LIBRARIES and,
# when obtainable, GPGME_VERSION.
include(PkgConfigWithFallbackOnConfigScript)
find_pkg_config_with_fallback_on_config_script(GPGME
    PKG_CONFIG_NAME gpgme
    CONFIG_SCRIPT_NAME gpgme
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GPGME
    REQUIRED_VARS GPGME_LIBRARY
    VERSION_VAR GPGME_VERSION)

31
cmake/FindGTK3.cmake Normal file
View File

@ -0,0 +1,31 @@
# FindGTK3.cmake
# Locates GTK 3 via pkg-config ("gtk+-3.0") with a direct probe as fallback.
# Exports GTK3_FOUND, GTK3_LIBRARY/LIBRARIES, GTK3_INCLUDE_DIRS,
# GTK3_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GTK3
    PKG_CONFIG_NAME gtk+-3.0
    LIB_NAMES gtk-3
    INCLUDE_NAMES gtk/gtk.h
    INCLUDE_DIR_SUFFIXES gtk-3.0 gtk-3.0/include gtk+-3.0 gtk+-3.0/include
    DEPENDS GDK3 ATK
)

# pkg-config reports the version itself; in the fallback case parse the
# GTK_{MAJOR,MINOR,MICRO}_VERSION macros out of gtk/gtkversion.h.
if(GTK3_FOUND AND NOT GTK3_VERSION)
    find_file(GTK3_VERSION_HEADER "gtk/gtkversion.h" HINTS ${GTK3_INCLUDE_DIRS})
    mark_as_advanced(GTK3_VERSION_HEADER)
    if(GTK3_VERSION_HEADER)
        # Fix: the REPLACE patterns accept one-or-more spaces (" +"), matching
        # the STRINGS patterns above; previously they required exactly one
        # space, so a reformatted header would leave the raw #define line in
        # the version variable.
        file(STRINGS "${GTK3_VERSION_HEADER}" GTK3_MAJOR_VERSION REGEX "^#define GTK_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define GTK_MAJOR_VERSION +\\(?([0-9]+)\\)?$" "\\1" GTK3_MAJOR_VERSION "${GTK3_MAJOR_VERSION}")
        file(STRINGS "${GTK3_VERSION_HEADER}" GTK3_MINOR_VERSION REGEX "^#define GTK_MINOR_VERSION +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define GTK_MINOR_VERSION +\\(?([0-9]+)\\)?$" "\\1" GTK3_MINOR_VERSION "${GTK3_MINOR_VERSION}")
        file(STRINGS "${GTK3_VERSION_HEADER}" GTK3_MICRO_VERSION REGEX "^#define GTK_MICRO_VERSION +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define GTK_MICRO_VERSION +\\(?([0-9]+)\\)?$" "\\1" GTK3_MICRO_VERSION "${GTK3_MICRO_VERSION}")
        set(GTK3_VERSION "${GTK3_MAJOR_VERSION}.${GTK3_MINOR_VERSION}.${GTK3_MICRO_VERSION}")
        unset(GTK3_MAJOR_VERSION)
        unset(GTK3_MINOR_VERSION)
        unset(GTK3_MICRO_VERSION)
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GTK3
    REQUIRED_VARS GTK3_LIBRARY
    VERSION_VAR GTK3_VERSION)

13
cmake/FindGee.cmake Normal file
View File

@ -0,0 +1,13 @@
# FindGee.cmake
# Locates libgee 0.8 via pkg-config ("gee-0.8") with a direct probe as
# fallback. Exports Gee_FOUND, Gee_LIBRARY/LIBRARIES, Gee_INCLUDE_DIRS
# and, when obtainable, Gee_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Gee
    PKG_CONFIG_NAME gee-0.8
    LIB_NAMES gee-0.8
    INCLUDE_NAMES gee.h
    INCLUDE_DIR_SUFFIXES gee-0.8 gee-0.8/include
    DEPENDS GObject
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gee
    REQUIRED_VARS Gee_LIBRARY
    VERSION_VAR Gee_VERSION)

20
cmake/FindGettext.cmake Normal file
View File

@ -0,0 +1,20 @@
# FindGettext.cmake
# Locates the GNU gettext command line tools (xgettext, msgmerge, msgfmt,
# msgcat). All four executables are required for the package to be
# considered found. Sets Gettext_FOUND, Gettext_VERSION and the
# *_EXECUTABLE cache entries, and points GETTEXT_USE_FILE at the helper
# module defining the gettext build rules.
find_program(MSGCAT_EXECUTABLE msgcat)
find_program(MSGFMT_EXECUTABLE msgfmt)
find_program(MSGMERGE_EXECUTABLE msgmerge)
find_program(XGETTEXT_EXECUTABLE xgettext)
mark_as_advanced(XGETTEXT_EXECUTABLE MSGMERGE_EXECUTABLE MSGFMT_EXECUTABLE MSGCAT_EXECUTABLE)

# Derive the tool-chain version from xgettext's --version banner, e.g.
# "xgettext (GNU gettext-tools) 0.21" -> "0.21".
if(XGETTEXT_EXECUTABLE)
    execute_process(
        COMMAND ${XGETTEXT_EXECUTABLE} "--version"
        OUTPUT_VARIABLE Gettext_VERSION
        OUTPUT_STRIP_TRAILING_WHITESPACE)
    string(REGEX REPLACE "xgettext \\(GNU gettext-tools\\) ([0-9\\.]*).*" "\\1" Gettext_VERSION "${Gettext_VERSION}")
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gettext
    FOUND_VAR Gettext_FOUND
    REQUIRED_VARS XGETTEXT_EXECUTABLE MSGMERGE_EXECUTABLE MSGFMT_EXECUTABLE MSGCAT_EXECUTABLE
    VERSION_VAR Gettext_VERSION)

set(GETTEXT_USE_FILE "${CMAKE_CURRENT_LIST_DIR}/UseGettext.cmake")

13
cmake/FindGnuTLS.cmake Normal file
View File

@ -0,0 +1,13 @@
# FindGnuTLS.cmake
# Locates GnuTLS via pkg-config ("gnutls") with a direct probe as fallback.
# Exports GnuTLS_FOUND, GnuTLS_LIBRARY/LIBRARIES, GnuTLS_INCLUDE_DIRS
# and, when obtainable, GnuTLS_VERSION.
# NOTE(review): GnuTLS itself does not link against GLib; confirm the
# DEPENDS GLib entry is intentional (it makes the fallback path require
# GLib to be found as well).
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GnuTLS
    PKG_CONFIG_NAME gnutls
    LIB_NAMES gnutls
    INCLUDE_NAMES gnutls/gnutls.h
    INCLUDE_DIR_SUFFIXES gnutls gnutls/include
    DEPENDS GLib
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GnuTLS
    REQUIRED_VARS GnuTLS_LIBRARY
    VERSION_VAR GnuTLS_VERSION)

14
cmake/FindGspell.cmake Normal file
View File

@ -0,0 +1,14 @@
# FindGspell.cmake
# Locates gspell via pkg-config ("gspell-1") with a direct probe as
# fallback. Exports Gspell_FOUND, Gspell_LIBRARY/LIBRARIES,
# Gspell_INCLUDE_DIRS and, when obtainable, Gspell_VERSION.
# NOTE(review): DEPENDS names "Gtk", but the sibling find module in this
# directory is FindGTK3 (package name "GTK3") — verify a FindGtk module
# exists, otherwise the fallback path can never succeed.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Gspell
    PKG_CONFIG_NAME gspell-1
    LIB_NAMES gspell-1
    INCLUDE_NAMES gspell.h
    INCLUDE_DIR_SUFFIXES gspell-1 gspell-1/gspell
    DEPENDS Gtk
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gspell
    REQUIRED_VARS Gspell_LIBRARY
    VERSION_VAR Gspell_VERSION)

12
cmake/FindGst.cmake Normal file
View File

@ -0,0 +1,12 @@
# FindGst.cmake
# Locates core GStreamer 1.0 via pkg-config ("gstreamer-1.0") with a direct
# probe as fallback. Exports Gst_FOUND, Gst_LIBRARY/LIBRARIES,
# Gst_INCLUDE_DIRS and, when obtainable, Gst_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Gst
    PKG_CONFIG_NAME gstreamer-1.0
    LIB_NAMES gstreamer-1.0
    INCLUDE_NAMES gst/gst.h
    INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gst
    REQUIRED_VARS Gst_LIBRARY
    VERSION_VAR Gst_VERSION)

14
cmake/FindGstApp.cmake Normal file
View File

@ -0,0 +1,14 @@
# FindGstApp.cmake
# Locates the GStreamer "app" helper library via pkg-config
# ("gstreamer-app-1.0") with a direct probe as fallback. The library is
# installed under a gstreamer-1.0 subdirectory, hence LIB_DIR_HINTS.
# Exports GstApp_FOUND, GstApp_LIBRARY/LIBRARIES, GstApp_INCLUDE_DIRS
# and, when obtainable, GstApp_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstApp
    PKG_CONFIG_NAME gstreamer-app-1.0
    LIB_NAMES gstapp
    LIB_DIR_HINTS gstreamer-1.0
    INCLUDE_NAMES gst/app/app.h
    INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-app-1.0 gstreamer-app-1.0/include
    DEPENDS Gst
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstApp
    REQUIRED_VARS GstApp_LIBRARY
    VERSION_VAR GstApp_VERSION)

14
cmake/FindGstAudio.cmake Normal file
View File

@ -0,0 +1,14 @@
# FindGstAudio.cmake
# Locates the GStreamer audio helper library via pkg-config
# ("gstreamer-audio-1.0") with a direct probe as fallback. The library is
# installed under a gstreamer-1.0 subdirectory, hence LIB_DIR_HINTS.
# Exports GstAudio_FOUND, GstAudio_LIBRARY/LIBRARIES, GstAudio_INCLUDE_DIRS
# and, when obtainable, GstAudio_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstAudio
    PKG_CONFIG_NAME gstreamer-audio-1.0
    LIB_NAMES gstaudio
    LIB_DIR_HINTS gstreamer-1.0
    INCLUDE_NAMES gst/audio/audio.h
    INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-audio-1.0 gstreamer-audio-1.0/include
    DEPENDS Gst
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstAudio
    REQUIRED_VARS GstAudio_LIBRARY
    VERSION_VAR GstAudio_VERSION)

19
cmake/FindGstRtp.cmake Normal file
View File

@ -0,0 +1,19 @@
# FindGstRtp.cmake
# Locates the GStreamer RTP helper library via pkg-config
# ("gstreamer-rtp-1.0") with a direct probe as fallback. The library is
# installed under a gstreamer-1.0 subdirectory, hence LIB_DIR_HINTS.
# Exports GstRtp_FOUND, GstRtp_LIBRARY/LIBRARIES, GstRtp_INCLUDE_DIRS
# and GstRtp_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstRtp
    PKG_CONFIG_NAME gstreamer-rtp-1.0
    LIB_NAMES gstrtp
    LIB_DIR_HINTS gstreamer-1.0
    INCLUDE_NAMES gst/rtp/rtp.h
    INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-rtp-1.0 gstreamer-rtp-1.0/include
    DEPENDS Gst
)

# The fallback path cannot determine a version; gst-rtp is released in
# lockstep with core GStreamer, so reuse Gst's version. Pass the
# project-wide version requirement (if any) for consistency with the other
# find modules (FindGIO, FindGModule, FindGObject); when Gst_GLOBAL_VERSION
# is unset this expands to plain find_package(Gst) as before.
if(GstRtp_FOUND AND NOT GstRtp_VERSION)
    find_package(Gst ${Gst_GLOBAL_VERSION})
    set(GstRtp_VERSION ${Gst_VERSION})
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstRtp
    REQUIRED_VARS GstRtp_LIBRARY
    VERSION_VAR GstRtp_VERSION)

14
cmake/FindGstVideo.cmake Normal file
View File

@ -0,0 +1,14 @@
# FindGstVideo.cmake
# Locates the GStreamer video helper library via pkg-config
# ("gstreamer-video-1.0") with a direct probe as fallback. The library is
# installed under a gstreamer-1.0 subdirectory, hence LIB_DIR_HINTS.
# Exports GstVideo_FOUND, GstVideo_LIBRARY/LIBRARIES, GstVideo_INCLUDE_DIRS
# and, when obtainable, GstVideo_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(GstVideo
    PKG_CONFIG_NAME gstreamer-video-1.0
    LIB_NAMES gstvideo
    LIB_DIR_HINTS gstreamer-1.0
    INCLUDE_NAMES gst/video/video.h
    INCLUDE_DIR_SUFFIXES gstreamer-1.0 gstreamer-1.0/include gstreamer-video-1.0 gstreamer-video-1.0/include
    DEPENDS Gst
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GstVideo
    REQUIRED_VARS GstVideo_LIBRARY
    VERSION_VAR GstVideo_VERSION)

11
cmake/FindICU.cmake Normal file
View File

@ -0,0 +1,11 @@
# FindICU.cmake
# Locates ICU's common (uc) component via pkg-config ("icu-uc") with a
# direct probe as fallback; both icuuc and icudata are required. Exports
# ICU_FOUND, ICU_LIBRARY/LIBRARIES, ICU_INCLUDE_DIRS and, when obtainable,
# ICU_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(ICU
    PKG_CONFIG_NAME icu-uc
    LIB_NAMES icuuc icudata
    INCLUDE_NAMES unicode/umachine.h
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(ICU
    REQUIRED_VARS ICU_LIBRARY
    VERSION_VAR ICU_VERSION)

13
cmake/FindNice.cmake Normal file
View File

@ -0,0 +1,13 @@
# FindNice.cmake
# Locates libnice (ICE/STUN/TURN library) via pkg-config ("nice") with a
# direct probe as fallback. Exports Nice_FOUND, Nice_LIBRARY/LIBRARIES,
# Nice_INCLUDE_DIRS and, when obtainable, Nice_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Nice
    PKG_CONFIG_NAME nice
    LIB_NAMES nice
    INCLUDE_NAMES nice.h
    INCLUDE_DIR_SUFFIXES nice nice/include
    DEPENDS GIO
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Nice
    REQUIRED_VARS Nice_LIBRARY
    VERSION_VAR Nice_VERSION)

33
cmake/FindPango.cmake Normal file
View File

@ -0,0 +1,33 @@
# FindPango.cmake
# Locates Pango via pkg-config ("pango") with a direct probe as fallback.
# Exports Pango_FOUND, Pango_LIBRARY/LIBRARIES, Pango_INCLUDE_DIRS,
# Pango_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Pango
    PKG_CONFIG_NAME pango
    LIB_NAMES pango-1.0
    INCLUDE_NAMES pango/pango.h
    INCLUDE_DIR_SUFFIXES pango-1.0 pango-1.0/include
    DEPENDS GObject
)

# pkg-config reports the version itself; in the fallback case parse the
# PANGO_VERSION_{MAJOR,MINOR,MICRO} macros out of pango/pango-features.h.
if(Pango_FOUND AND NOT Pango_VERSION)
    find_file(Pango_FEATURES_HEADER "pango/pango-features.h" HINTS ${Pango_INCLUDE_DIRS})
    mark_as_advanced(Pango_FEATURES_HEADER)
    if(Pango_FEATURES_HEADER)
        # Fix: the REPLACE patterns accept one-or-more spaces (" +"), matching
        # the STRINGS patterns above; previously they required exactly one
        # space, so a reformatted header would leave the raw #define line in
        # the version variable.
        file(STRINGS "${Pango_FEATURES_HEADER}" Pango_MAJOR_VERSION REGEX "^#define PANGO_VERSION_MAJOR +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define PANGO_VERSION_MAJOR +\\(?([0-9]+)\\)?$" "\\1" Pango_MAJOR_VERSION "${Pango_MAJOR_VERSION}")
        file(STRINGS "${Pango_FEATURES_HEADER}" Pango_MINOR_VERSION REGEX "^#define PANGO_VERSION_MINOR +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define PANGO_VERSION_MINOR +\\(?([0-9]+)\\)?$" "\\1" Pango_MINOR_VERSION "${Pango_MINOR_VERSION}")
        file(STRINGS "${Pango_FEATURES_HEADER}" Pango_MICRO_VERSION REGEX "^#define PANGO_VERSION_MICRO +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define PANGO_VERSION_MICRO +\\(?([0-9]+)\\)?$" "\\1" Pango_MICRO_VERSION "${Pango_MICRO_VERSION}")
        set(Pango_VERSION "${Pango_MAJOR_VERSION}.${Pango_MINOR_VERSION}.${Pango_MICRO_VERSION}")
        unset(Pango_MAJOR_VERSION)
        unset(Pango_MINOR_VERSION)
        unset(Pango_MICRO_VERSION)
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Pango
    FOUND_VAR Pango_FOUND
    REQUIRED_VARS Pango_LIBRARY
    VERSION_VAR Pango_VERSION
)

11
cmake/FindQrencode.cmake Normal file
View File

@ -0,0 +1,11 @@
# FindQrencode.cmake
# Locates libqrencode via pkg-config ("libqrencode") with a direct probe as
# fallback. Exports Qrencode_FOUND, Qrencode_LIBRARY/LIBRARIES,
# Qrencode_INCLUDE_DIRS and, when obtainable, Qrencode_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Qrencode
    PKG_CONFIG_NAME libqrencode
    LIB_NAMES qrencode
    INCLUDE_NAMES qrencode.h
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Qrencode
    REQUIRED_VARS Qrencode_LIBRARY
    VERSION_VAR Qrencode_VERSION)

21
cmake/FindSQLite3.cmake Normal file
View File

@ -0,0 +1,21 @@
# FindSQLite3.cmake
# Locates SQLite 3 via pkg-config ("sqlite3") with a direct probe as
# fallback. Exports SQLite3_FOUND, SQLite3_LIBRARY/LIBRARIES,
# SQLite3_INCLUDE_DIRS, SQLite3_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(SQLite3
    PKG_CONFIG_NAME sqlite3
    LIB_NAMES sqlite3
    INCLUDE_NAMES sqlite3.h
)

# pkg-config reports the version itself; in the fallback case read the
# SQLITE_VERSION string literal from sqlite3.h.
if(SQLite3_FOUND AND NOT SQLite3_VERSION)
    find_file(SQLite3_HEADER "sqlite3.h" HINTS ${SQLite3_INCLUDE_DIRS})
    mark_as_advanced(SQLite3_HEADER)
    if(SQLite3_HEADER)
        file(STRINGS "${SQLite3_HEADER}" SQLite3_VERSION REGEX "^#define SQLITE_VERSION +\\\"[^\\\"]+\\\"")
        string(REGEX REPLACE "^#define SQLITE_VERSION +\\\"([0-9]+)\\.([0-9]+)\\.([0-9]+)\\\"$" "\\1.\\2.\\3" SQLite3_VERSION "${SQLite3_VERSION}")
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(SQLite3
    REQUIRED_VARS SQLite3_LIBRARY
    VERSION_VAR SQLite3_VERSION)

View File

@ -0,0 +1,11 @@
# FindSignalProtocol.cmake
# Locates libsignal-protocol-c via pkg-config ("libsignal-protocol-c") with
# a direct probe as fallback. Exports SignalProtocol_FOUND,
# SignalProtocol_LIBRARY/LIBRARIES, SignalProtocol_INCLUDE_DIRS and, when
# obtainable, SignalProtocol_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(SignalProtocol
    PKG_CONFIG_NAME libsignal-protocol-c
    LIB_NAMES signal-protocol-c
    INCLUDE_NAMES signal/signal_protocol.h
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(SignalProtocol
    REQUIRED_VARS SignalProtocol_LIBRARY
    VERSION_VAR SignalProtocol_VERSION)

31
cmake/FindSoup.cmake Normal file
View File

@ -0,0 +1,31 @@
# FindSoup.cmake
# Locates libsoup 2.4 via pkg-config ("libsoup-2.4") with a direct probe as
# fallback. Exports Soup_FOUND, Soup_LIBRARY/LIBRARIES, Soup_INCLUDE_DIRS,
# Soup_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Soup
    PKG_CONFIG_NAME libsoup-2.4
    LIB_NAMES soup-2.4
    INCLUDE_NAMES libsoup/soup.h
    INCLUDE_DIR_SUFFIXES libsoup-2.4 libsoup-2.4/include libsoup libsoup/include
    DEPENDS GIO
)

# pkg-config reports the version itself; in the fallback case parse the
# SOUP_{MAJOR,MINOR,MICRO}_VERSION macros out of libsoup/soup-version.h.
if(Soup_FOUND AND NOT Soup_VERSION)
    find_file(Soup_VERSION_HEADER "libsoup/soup-version.h" HINTS ${Soup_INCLUDE_DIRS})
    mark_as_advanced(Soup_VERSION_HEADER)
    if(Soup_VERSION_HEADER)
        # Fix: the REPLACE patterns accept one-or-more spaces (" +"), matching
        # the STRINGS patterns above; previously they required exactly one
        # space, so a reformatted header would leave the raw #define line in
        # the version variable.
        file(STRINGS "${Soup_VERSION_HEADER}" Soup_MAJOR_VERSION REGEX "^#define SOUP_MAJOR_VERSION +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define SOUP_MAJOR_VERSION +\\(?([0-9]+)\\)?$" "\\1" Soup_MAJOR_VERSION "${Soup_MAJOR_VERSION}")
        file(STRINGS "${Soup_VERSION_HEADER}" Soup_MINOR_VERSION REGEX "^#define SOUP_MINOR_VERSION +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define SOUP_MINOR_VERSION +\\(?([0-9]+)\\)?$" "\\1" Soup_MINOR_VERSION "${Soup_MINOR_VERSION}")
        file(STRINGS "${Soup_VERSION_HEADER}" Soup_MICRO_VERSION REGEX "^#define SOUP_MICRO_VERSION +\\(?([0-9]+)\\)?$")
        string(REGEX REPLACE "^#define SOUP_MICRO_VERSION +\\(?([0-9]+)\\)?$" "\\1" Soup_MICRO_VERSION "${Soup_MICRO_VERSION}")
        set(Soup_VERSION "${Soup_MAJOR_VERSION}.${Soup_MINOR_VERSION}.${Soup_MICRO_VERSION}")
        unset(Soup_MAJOR_VERSION)
        unset(Soup_MINOR_VERSION)
        unset(Soup_MICRO_VERSION)
    endif()
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Soup
    REQUIRED_VARS Soup_LIBRARY
    VERSION_VAR Soup_VERSION)

12
cmake/FindSrtp2.cmake Normal file
View File

@ -0,0 +1,12 @@
# FindSrtp2.cmake
# Locates libsrtp2 via pkg-config ("libsrtp2") with a direct probe as
# fallback. Exports Srtp2_FOUND, Srtp2_LIBRARY/LIBRARIES,
# Srtp2_INCLUDE_DIRS and, when obtainable, Srtp2_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Srtp2
    PKG_CONFIG_NAME libsrtp2
    LIB_NAMES srtp2
    INCLUDE_NAMES srtp2/srtp.h
    INCLUDE_DIR_SUFFIXES srtp2 srtp2/include
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Srtp2
    REQUIRED_VARS Srtp2_LIBRARY
    VERSION_VAR Srtp2_VERSION)

73
cmake/FindVala.cmake Normal file
View File

@ -0,0 +1,73 @@
##
# Find module for the Vala compiler (valac)
#
# This module determines whether a Vala compiler is installed on the current
# system and where its executable is.
#
# Call the module using "find_package(Vala)" from within your CMakeLists.txt.
#
# The following variables will be set after an invocation:
#
# VALA_FOUND Whether the vala compiler has been found or not
# VALA_EXECUTABLE Full path to the valac executable if it has been found
# VALA_VERSION Version number of the available valac
# VALA_USE_FILE Include this file to define the vala_precompile function
##
##
# Copyright 2009-2010 Jakob Westhoff. All rights reserved.
# Copyright 2010-2011 Daniel Pfeifer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY JAKOB WESTHOFF ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL JAKOB WESTHOFF OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of Jakob Westhoff
##
# NOTE(review): finding GObject is not needed to locate valac itself;
# presumably valac-generated C code always requires GObject — confirm this
# requirement belongs here rather than in the consuming targets.
find_package(GObject REQUIRED)
# Search for the valac executable in the usual system paths.
# Some distributions rename valac to contain the major.minor version in the
# binary name, so probe those variants as well.
find_program(VALA_EXECUTABLE NAMES valac valac-0.38 valac-0.36 valac-0.34 valac-0.32)
mark_as_advanced(VALA_EXECUTABLE)
# Determine the valac version from its "--version" banner
# (e.g. "Vala 0.48.0" -> "0.48.0").
if(VALA_EXECUTABLE)
file(TO_NATIVE_PATH "${VALA_EXECUTABLE}" VALA_EXECUTABLE)
execute_process(COMMAND ${VALA_EXECUTABLE} "--version"
OUTPUT_VARIABLE VALA_VERSION
OUTPUT_STRIP_TRAILING_WHITESPACE)
string(REPLACE "Vala " "" VALA_VERSION "${VALA_VERSION}")
endif(VALA_EXECUTABLE)
# Handle the QUIETLY and REQUIRED arguments, which may be given to the find call.
# Furthermore set VALA_FOUND to TRUE if Vala has been found (aka.
# VALA_EXECUTABLE is set)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Vala
FOUND_VAR VALA_FOUND
REQUIRED_VARS VALA_EXECUTABLE
VERSION_VAR VALA_VERSION)
# Expose the helper module that defines vala_precompile().
set(VALA_USE_FILE "${CMAKE_CURRENT_LIST_DIR}/UseVala.cmake")

View File

@ -0,0 +1,12 @@
# FindWebRTCAudioProcessing.cmake
# Locates the webrtc-audio-processing library via pkg-config
# ("webrtc-audio-processing") with a direct probe as fallback. Exports
# WebRTCAudioProcessing_FOUND, WebRTCAudioProcessing_LIBRARY/LIBRARIES,
# WebRTCAudioProcessing_INCLUDE_DIRS and, when obtainable,
# WebRTCAudioProcessing_VERSION.
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(WebRTCAudioProcessing
    PKG_CONFIG_NAME webrtc-audio-processing
    LIB_NAMES webrtc_audio_processing
    INCLUDE_NAMES webrtc/modules/audio_processing/include/audio_processing.h
    INCLUDE_DIR_SUFFIXES webrtc-audio-processing webrtc_audio_processing
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(WebRTCAudioProcessing
    REQUIRED_VARS WebRTCAudioProcessing_LIBRARY
    VERSION_VAR WebRTCAudioProcessing_VERSION)

124
cmake/GenerateGXML.cmake Normal file
View File

@ -0,0 +1,124 @@
include(CMakeParseArguments)

# Generates the GResource XML controlling file from a resource list and
# writes it to xml_path. It's not recommended to use this function directly,
# since it doesn't handle invalid arguments. It is used by
# COMPILE_GRESOURCES() to create a custom command, so that this function is
# invoked at build-time in script mode from CMake.
#
# Arguments:
#   xml_path                        Output path of the generated XML file.
#   COMPRESS_ALL/NO_COMPRESS_ALL,
#   STRIPBLANKS_ALL/NO_STRIPBLANKS_ALL,
#   TOPIXDATA_ALL/NO_TOPIXDATA_ALL  Override the per-resource COMPRESS /
#                                   STRIPBLANKS / TOPIXDATA flags globally.
#   PREFIX                          Resource prefix prepended to each relative
#                                   file name (defaults to "/").
#   RESOURCES                       Resource file list; flag keywords
#                                   (COMPRESS, STRIPBLANKS, TOPIXDATA) apply
#                                   to the resource entry that follows them.
function(GENERATE_GXML xml_path)
    set(GXML_OPTIONS COMPRESS_ALL NO_COMPRESS_ALL
                     STRIPBLANKS_ALL NO_STRIPBLANKS_ALL
                     TOPIXDATA_ALL NO_TOPIXDATA_ALL)
    set(GXML_ONEVALUEARGS PREFIX)
    set(GXML_MULTIVALUEARGS RESOURCES)

    cmake_parse_arguments(GXML_ARG
                          "${GXML_OPTIONS}"
                          "${GXML_ONEVALUEARGS}"
                          "${GXML_MULTIVALUEARGS}"
                          "${ARGN}")

    # Helper holding a literal double quote; escaping quotes inside the long
    # string concatenations below would be unreadable otherwise.
    set(Q \")

    # XML header and opening nodes.
    set(GXML_XML_FILE "<?xml version=${Q}1.0${Q} encoding=${Q}UTF-8${Q}?>")
    set(GXML_XML_FILE "${GXML_XML_FILE}<gresources><gresource prefix=${Q}")

    # Resource prefix: user override or the default "/".
    if (GXML_ARG_PREFIX)
        set(GXML_XML_FILE "${GXML_XML_FILE}${GXML_ARG_PREFIX}")
    else()
        set(GXML_XML_FILE "${GXML_XML_FILE}/")
    endif()
    set(GXML_XML_FILE "${GXML_XML_FILE}${Q}>")

    # Walk the resource list. Flag keywords set sticky flags that apply to
    # the next real resource entry, after which they are reset.
    foreach(res ${GXML_ARG_RESOURCES})
        if ("${res}" STREQUAL "COMPRESS")
            set(GXML_COMPRESSION_FLAG ON)
        elseif ("${res}" STREQUAL "STRIPBLANKS")
            set(GXML_STRIPBLANKS_FLAG ON)
        elseif ("${res}" STREQUAL "TOPIXDATA")
            set(GXML_TOPIXDATA_FLAG ON)
        else()
            set(GXML_RESOURCE_PATH "${res}")
            # Track the real files so callers can depend on them.
            list(APPEND GXML_RESOURCES_DEPENDENCIES ${GXML_RESOURCE_PATH})

            # Assemble the <file> node.
            set(GXML_RES_LINE "<file")
            if ((GXML_ARG_COMPRESS_ALL OR GXML_COMPRESSION_FLAG) AND NOT
                    GXML_ARG_NO_COMPRESS_ALL)
                set(GXML_RES_LINE "${GXML_RES_LINE} compressed=${Q}true${Q}")
            endif()

            # STRIPBLANKS and TOPIXDATA are mutually exclusive preprocess
            # options; reject a resource that requests both.
            if ((GXML_ARG_STRIPBLANKS_ALL OR GXML_STRIPBLANKS_FLAG) AND
                    (GXML_ARG_TOPIXDATA_ALL OR GXML_TOPIXDATA_FLAG))
                set(GXML_ERRMSG "Resource preprocessing option conflict. Tried")
                set(GXML_ERRMSG "${GXML_ERRMSG} to specify both, STRIPBLANKS")
                set(GXML_ERRMSG "${GXML_ERRMSG} and TOPIXDATA. In resource")
                set(GXML_ERRMSG "${GXML_ERRMSG} ${GXML_RESOURCE_PATH} in")
                set(GXML_ERRMSG "${GXML_ERRMSG} function COMPILE_GRESOURCES.")
                message(FATAL_ERROR ${GXML_ERRMSG})
            endif()

            if ((GXML_ARG_STRIPBLANKS_ALL OR GXML_STRIPBLANKS_FLAG) AND NOT
                    GXML_ARG_NO_STRIPBLANKS_ALL)
                set(GXML_RES_LINE "${GXML_RES_LINE} preprocess=")
                set(GXML_RES_LINE "${GXML_RES_LINE}${Q}xml-stripblanks${Q}")
            elseif((GXML_ARG_TOPIXDATA_ALL OR GXML_TOPIXDATA_FLAG) AND NOT
                    GXML_ARG_NO_TOPIXDATA_ALL)
                set(GXML_RES_LINE "${GXML_RES_LINE} preprocess=")
                set(GXML_RES_LINE "${GXML_RES_LINE}${Q}to-pixdata${Q}")
            endif()

            set(GXML_RES_LINE "${GXML_RES_LINE}>${GXML_RESOURCE_PATH}</file>")
            set(GXML_XML_FILE "${GXML_XML_FILE}${GXML_RES_LINE}")

            # Reset the sticky per-resource flags.
            unset(GXML_COMPRESSION_FLAG)
            unset(GXML_STRIPBLANKS_FLAG)
            unset(GXML_TOPIXDATA_FLAG)
        endif()
    endforeach()

    # Closing nodes, then emit the file.
    set(GXML_XML_FILE "${GXML_XML_FILE}</gresource></gresources>")
    # Note: the previous revision also computed xml_path_only_name via
    # get_filename_component(); the result was never used, so the dead call
    # has been removed.
    file(WRITE ${xml_path} ${GXML_XML_FILE})
endfunction()

View File

@ -0,0 +1,11 @@
# Bootstrap for the GLib compile-resources (GCR) CMake helpers: records this
# directory, locates glib-compile-resources, and pulls in the function
# definitions used by consumers.

# Path to this file.
set(GCR_CMAKE_MACRO_DIR ${CMAKE_CURRENT_LIST_DIR})

# Finds the glib-compile-resources executable.
find_program(GLIB_COMPILE_RESOURCES_EXECUTABLE glib-compile-resources)
mark_as_advanced(GLIB_COMPILE_RESOURCES_EXECUTABLE)

# Include the cmake files containing the functions.
include(${GCR_CMAKE_MACRO_DIR}/CompileGResources.cmake)
include(${GCR_CMAKE_MACRO_DIR}/GenerateGXML.cmake)

11
cmake/LargeFileOffsets.c Normal file
View File

@ -0,0 +1,11 @@
/*
 * Compile-time probe for 64-bit file offsets (large file support).
 * The program is never meant to run; it is only compiled by a
 * try_compile()-style check. If off_t is narrower than 64 bits the
 * constant expressions below truncate, both modulo comparisons fail, and
 * the array is declared with size -1 — a compile error that makes the
 * check fail.
 */
#include <sys/types.h>

/* Powers of 1024 expressed as off_t so the arithmetic happens in off_t. */
#define _K ((off_t)1024)
#define _M ((off_t)1024 * _K)
#define _G ((off_t)1024 * _M)
#define _T ((off_t)1024 * _G)

/* Array size is 1 when 64-bit off_t arithmetic holds, -1 (error) otherwise. */
int test[(((64 * _G -1) % 671088649) == 268434537) && (((_T - (64 * _G -1) + 255) % 1792151290) == 305159546)? 1: -1];

int main() {
    return 0;
}

45
cmake/MultiFind.cmake Normal file
View File

@ -0,0 +1,45 @@
include(CMakeParseArguments)

# find_packages(result REQUIRED <pkg>[>=<ver>]... OPTIONAL <pkg>[>=<ver>]...)
#
# Resolves a list of find modules in one call. Each entry is a package name
# optionally suffixed with ">=version" (e.g. "GLib>=2.38"); when no inline
# version is given, the package's <pkg>_GLOBAL_VERSION variable (if set) is
# used as the requirement. REQUIRED entries abort configuration when
# missing; OPTIONAL entries are simply skipped.
#
# Sets in the caller's scope:
#   ${result}      - pkg-config names (<pkg>_PKG_CONFIG_NAME) of every
#                    package that was found
#   ${result}_LIBS - accumulated <pkg>_LIBRARIES of those packages
function(find_packages result)
    cmake_parse_arguments(ARGS "" "" "REQUIRED;OPTIONAL" ${ARGN})
    set(_res "")
    set(_res_libs "")
    foreach(pkg ${ARGS_REQUIRED})
        # Split "Name>=Version" into a two-element list.
        string(REPLACE ">=" ";" pkg_ ${pkg})
        list(GET pkg_ "0" pkg)
        list(LENGTH pkg_ pkg_has_version)
        if(pkg_has_version GREATER 1)
            list(GET pkg_ "1" pkg_version)
        else()
            # No inline version: fall back to the project-wide requirement.
            if(${pkg}_GLOBAL_VERSION)
                set(pkg_version ${${pkg}_GLOBAL_VERSION})
            else()
                unset(pkg_version)
            endif()
        endif()
        find_package(${pkg} ${pkg_version} REQUIRED)
        list(APPEND _res ${${pkg}_PKG_CONFIG_NAME})
        list(APPEND _res_libs ${${pkg}_LIBRARIES})
    endforeach(pkg)
    # Same handling as above, but a missing package is tolerated.
    foreach(pkg ${ARGS_OPTIONAL})
        string(REPLACE ">=" ";" pkg_ ${pkg})
        list(GET pkg_ "0" pkg)
        list(LENGTH pkg_ pkg_has_version)
        if(pkg_has_version GREATER 1)
            list(GET pkg_ "1" pkg_version)
        else()
            if(${pkg}_GLOBAL_VERSION)
                set(pkg_version ${${pkg}_GLOBAL_VERSION})
            else()
                unset(pkg_version)
            endif()
        endif()
        find_package(${pkg} ${pkg_version})
        if(${pkg}_FOUND)
            list(APPEND _res ${${pkg}_PKG_CONFIG_NAME})
            list(APPEND _res_libs ${${pkg}_LIBRARIES})
        endif()
    endforeach(pkg)
    # Export the accumulated lists to the caller.
    set(${result} "${_res}" PARENT_SCOPE)
    set(${result}_LIBS "${_res_libs}" PARENT_SCOPE)
endfunction()

View File

@ -0,0 +1,102 @@
include(CMakeParseArguments)

# find_pkg_config_with_fallback(<name>
#     PKG_CONFIG_NAME <pc-name>
#     LIB_NAMES <lib>...
#     [LIB_DIR_HINTS <dir>...]
#     INCLUDE_NAMES <header>...
#     [INCLUDE_DIR_PATHS <dir>...] [INCLUDE_DIR_HINTS <dir>...]
#     [INCLUDE_DIR_SUFFIXES <suffix>...]
#     [DEPENDS <package>...])
#
# Locates a library via pkg-config when available, otherwise by probing for
# each library and header directly. On success sets <name>_FOUND,
# <name>_LIBRARY, <name>_LIBRARIES, <name>_INCLUDE_DIRS, <name>_VERSION
# (pkg-config path only) and defines an imported interface target named
# after the pkg-config name.
function(find_pkg_config_with_fallback name)
    cmake_parse_arguments(ARGS "" "PKG_CONFIG_NAME" "LIB_NAMES;LIB_DIR_HINTS;INCLUDE_NAMES;INCLUDE_DIR_PATHS;INCLUDE_DIR_HINTS;INCLUDE_DIR_SUFFIXES;DEPENDS" ${ARGN})
    set(${name}_PKG_CONFIG_NAME ${ARGS_PKG_CONFIG_NAME} PARENT_SCOPE)
    find_package(PkgConfig)
    if(PKG_CONFIG_FOUND)
        pkg_search_module(${name}_PKG_CONFIG QUIET ${ARGS_PKG_CONFIG_NAME})
    endif(PKG_CONFIG_FOUND)

    if (${name}_PKG_CONFIG_FOUND)
        # Found via pkg-config, using its result values
        set(${name}_FOUND ${${name}_PKG_CONFIG_FOUND})

        # Resolve each pkg-config library name to an actual file; missing
        # files invalidate the whole find.
        foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
            find_library(${name}_${lib}_LIBRARY ${lib} HINTS ${${name}_PKG_CONFIG_LIBRARY_DIRS})
            mark_as_advanced(${name}_${lib}_LIBRARY)
            if(NOT ${name}_${lib}_LIBRARY)
                unset(${name}_FOUND)
            endif(NOT ${name}_${lib}_LIBRARY)
        endforeach(lib)

        if(${name}_FOUND)
            set(${name}_LIBRARIES "")
            foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
                list(APPEND ${name}_LIBRARIES ${${name}_${lib}_LIBRARY})
            endforeach(lib)
            list(REMOVE_DUPLICATES ${name}_LIBRARIES)
            set(${name}_LIBRARIES ${${name}_LIBRARIES} PARENT_SCOPE)
            list(GET ${name}_LIBRARIES "0" ${name}_LIBRARY)
            set(${name}_FOUND ${${name}_FOUND} PARENT_SCOPE)
            set(${name}_INCLUDE_DIRS ${${name}_PKG_CONFIG_INCLUDE_DIRS} PARENT_SCOPE)
            # NOTE(review): this overwrites the resolved library paths
            # exported three lines above with the plain pkg-config library
            # names — confirm whether consumers rely on getting names here;
            # behavior kept as-is.
            set(${name}_LIBRARIES ${${name}_PKG_CONFIG_LIBRARIES} PARENT_SCOPE)
            set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
            set(${name}_VERSION ${${name}_PKG_CONFIG_VERSION} PARENT_SCOPE)

            # Expose the result as an imported target carrying compile
            # options, include dirs and link libraries.
            if(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
                add_library(${ARGS_PKG_CONFIG_NAME} INTERFACE IMPORTED)
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_COMPILE_OPTIONS "${${name}_PKG_CONFIG_CFLAGS_OTHER}")
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${name}_PKG_CONFIG_INCLUDE_DIRS}")
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LIBRARIES}")
            endif(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
        endif(${name}_FOUND)
    else(${name}_PKG_CONFIG_FOUND)
        # No success with pkg-config, try via find_library on all lib_names
        set(${name}_FOUND "1")
        foreach(lib ${ARGS_LIB_NAMES})
            # Fix: search for this specific lib name only. Previously the
            # whole ${ARGS_LIB_NAMES} list was passed, so every per-lib
            # variable resolved to the first library found from *any* name,
            # producing duplicate/wrong entries in ${name}_LIBRARIES.
            find_library(${name}_${lib}_LIBRARY ${lib} HINTS ${ARGS_LIB_DIR_HINTS})
            mark_as_advanced(${name}_${lib}_LIBRARY)
            if(NOT ${name}_${lib}_LIBRARY)
                unset(${name}_FOUND)
            endif(NOT ${name}_${lib}_LIBRARY)
        endforeach(lib)

        # Every requested header must be locatable as well.
        foreach(inc ${ARGS_INCLUDE_NAMES})
            find_path(${name}_${inc}_INCLUDE_PATH ${inc} HINTS ${ARGS_INCLUDE_DIR_HINTS} PATHS ${ARGS_INCLUDE_DIR_PATHS} PATH_SUFFIXES ${ARGS_INCLUDE_DIR_SUFFIXES})
            mark_as_advanced(${name}_${inc}_INCLUDE_PATH)
            if(NOT ${name}_${inc}_INCLUDE_PATH)
                unset(${name}_FOUND)
            endif(NOT ${name}_${inc}_INCLUDE_PATH)
        endforeach(inc)

        if(${name}_FOUND)
            set(${name}_LIBRARIES "")
            set(${name}_INCLUDE_DIRS "")
            foreach(lib ${ARGS_LIB_NAMES})
                list(APPEND ${name}_LIBRARIES ${${name}_${lib}_LIBRARY})
            endforeach(lib)
            foreach(inc ${ARGS_INCLUDE_NAMES})
                list(APPEND ${name}_INCLUDE_DIRS ${${name}_${inc}_INCLUDE_PATH})
            endforeach(inc)
            list(GET ${name}_LIBRARIES "0" ${name}_LIBRARY)

            # Transitive dependencies must resolve too; a missing dependency
            # invalidates the whole find.
            foreach(dep ${ARGS_DEPENDS})
                find_package(${dep} ${${dep}_GLOBAL_VERSION} QUIET)
                if(${dep}_FOUND)
                    list(APPEND ${name}_INCLUDE_DIRS ${${dep}_INCLUDE_DIRS})
                    list(APPEND ${name}_LIBRARIES ${${dep}_LIBRARIES})
                else(${dep}_FOUND)
                    unset(${name}_FOUND)
                endif(${dep}_FOUND)
            endforeach(dep)

            set(${name}_FOUND ${${name}_FOUND} PARENT_SCOPE)
            set(${name}_INCLUDE_DIRS ${${name}_INCLUDE_DIRS} PARENT_SCOPE)
            set(${name}_LIBRARIES ${${name}_LIBRARIES} PARENT_SCOPE)
            set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
            # No version information is available on the fallback path.
            unset(${name}_VERSION PARENT_SCOPE)

            if(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
                add_library(${ARGS_PKG_CONFIG_NAME} INTERFACE IMPORTED)
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${name}_INCLUDE_DIRS}")
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LIBRARIES}")
            endif(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
        endif(${name}_FOUND)
    endif(${name}_PKG_CONFIG_FOUND)
endfunction()

View File

@ -0,0 +1,103 @@
include(CMakeParseArguments)
##
# find_pkg_config_with_fallback_on_config_script(<name>
#     PKG_CONFIG_NAME <pkg-config module>
#     CONFIG_SCRIPT_NAME <script base name>)
#
# Locate a dependency via pkg-config; when the module is unknown to
# pkg-config, fall back to the legacy "<script base name>-config" shell
# script (e.g. gpgme-config).  On success sets, in the caller's scope,
# <name>_FOUND, <name>_LIBRARY, <name>_LIBRARIES, <name>_INCLUDE_DIRS and
# <name>_VERSION (availability depends on which path succeeded) and creates
# an imported INTERFACE library carrying the compile/link requirements.
##
function(find_pkg_config_with_fallback_on_config_script name)
    cmake_parse_arguments(ARGS "" "PKG_CONFIG_NAME" "CONFIG_SCRIPT_NAME" ${ARGN})
    set(${name}_PKG_CONFIG_NAME ${ARGS_PKG_CONFIG_NAME} PARENT_SCOPE)

    find_package(PkgConfig)
    if(PKG_CONFIG_FOUND)
        pkg_search_module(${name}_PKG_CONFIG QUIET ${ARGS_PKG_CONFIG_NAME})
    endif()

    if(${name}_PKG_CONFIG_FOUND)
        # Found via pkg-config, use its result values
        set(${name}_FOUND ${${name}_PKG_CONFIG_FOUND})
        # Resolve each pkg-config library name to an actual file on disk;
        # any unresolvable library marks the whole package as not found.
        foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
            find_library(${name}_${lib}_LIBRARY ${lib} HINTS ${${name}_PKG_CONFIG_LIBRARY_DIRS})
            mark_as_advanced(${name}_${lib}_LIBRARY)
            if(NOT ${name}_${lib}_LIBRARY)
                unset(${name}_FOUND)
            endif()
        endforeach()
        if(${name}_FOUND)
            set(${name}_LIBRARIES "")
            foreach(lib ${${name}_PKG_CONFIG_LIBRARIES})
                list(APPEND ${name}_LIBRARIES ${${name}_${lib}_LIBRARY})
            endforeach()
            list(REMOVE_DUPLICATES ${name}_LIBRARIES)
            set(${name}_LIBRARIES ${${name}_LIBRARIES} PARENT_SCOPE)
            list(GET ${name}_LIBRARIES "0" ${name}_LIBRARY)
            set(${name}_FOUND ${${name}_FOUND} PARENT_SCOPE)
            set(${name}_INCLUDE_DIRS ${${name}_PKG_CONFIG_INCLUDE_DIRS} PARENT_SCOPE)
            # NOTE(review): this re-exports the short pkg-config names and
            # overrides the resolved file paths exported above — confirm
            # that callers expect the short names here.
            set(${name}_LIBRARIES ${${name}_PKG_CONFIG_LIBRARIES} PARENT_SCOPE)
            set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
            set(${name}_VERSION ${${name}_PKG_CONFIG_VERSION} PARENT_SCOPE)
            if(NOT TARGET ${ARGS_PKG_CONFIG_NAME})
                add_library(${ARGS_PKG_CONFIG_NAME} INTERFACE IMPORTED)
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_COMPILE_OPTIONS "${${name}_PKG_CONFIG_CFLAGS_OTHER}")
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${${name}_PKG_CONFIG_INCLUDE_DIRS}")
                set_property(TARGET ${ARGS_PKG_CONFIG_NAME} PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LIBRARIES}")
            endif()
        endif()
    else()
        # No success with pkg-config, try via a custom *-config script
        find_program(${name}_CONFIG_EXECUTABLE NAMES ${ARGS_CONFIG_SCRIPT_NAME}-config)
        mark_as_advanced(${name}_CONFIG_EXECUTABLE)
        find_program(${name}_SH_EXECUTABLE NAMES sh)
        mark_as_advanced(${name}_SH_EXECUTABLE)
        if(${name}_CONFIG_EXECUTABLE)
            # Abort configuration when the config script itself fails.
            macro(config_script_fail errcode)
                if(${errcode})
                    message(FATAL_ERROR "Error invoking ${ARGS_CONFIG_SCRIPT_NAME}-config: ${errcode}")
                endif()
            endmacro()
            file(TO_NATIVE_PATH "${${name}_CONFIG_EXECUTABLE}" ${name}_CONFIG_EXECUTABLE)
            file(TO_NATIVE_PATH "${${name}_SH_EXECUTABLE}" ${name}_SH_EXECUTABLE)
            # Query version, API version, compile flags and link flags.
            execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --version
                            OUTPUT_VARIABLE ${name}_VERSION
                            RESULT_VARIABLE ERRCODE
                            OUTPUT_STRIP_TRAILING_WHITESPACE)
            config_script_fail(${ERRCODE})
            execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --api-version
                            OUTPUT_VARIABLE ${name}_API_VERSION
                            RESULT_VARIABLE ERRCODE
                            OUTPUT_STRIP_TRAILING_WHITESPACE)
            config_script_fail(${ERRCODE})
            execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --cflags
                            OUTPUT_VARIABLE ${name}_CFLAGS
                            RESULT_VARIABLE ERRCODE
                            OUTPUT_STRIP_TRAILING_WHITESPACE)
            config_script_fail(${ERRCODE})
            execute_process(COMMAND "${${name}_SH_EXECUTABLE}" "${${name}_CONFIG_EXECUTABLE}" --libs
                            OUTPUT_VARIABLE ${name}_LDFLAGS
                            RESULT_VARIABLE ERRCODE
                            OUTPUT_STRIP_TRAILING_WHITESPACE)
            config_script_fail(${ERRCODE})
            # Extract the library name (-l...) and search path (-L...) from
            # the --libs output.
            string(TOLOWER ${name} "${name}_LOWER")
            string(REGEX REPLACE "^(.* |)-l([^ ]*${${name}_LOWER}[^ ]*)( .*|)$" "\\2" ${name}_LIBRARY_NAME "${${name}_LDFLAGS}")
            string(REGEX REPLACE "^(.* |)-L([^ ]*)( .*|)$" "\\2" ${name}_LIBRARY_DIRS "${${name}_LDFLAGS}")
            find_library(${name}_LIBRARY ${${name}_LIBRARY_NAME} HINTS ${${name}_LIBRARY_DIRS})
            mark_as_advanced(${name}_LIBRARY)
            set(${name}_LIBRARY ${${name}_LIBRARY} PARENT_SCOPE)
            set(${name}_VERSION ${${name}_VERSION} PARENT_SCOPE)
            unset(${name}_LIBRARY_NAME)
            unset(${name}_LIBRARY_DIRS)
            # BUGFIX: the imported target must be named after the lowercase
            # name stored in the ${${name}_LOWER} variable.  The original
            # used the unexpanded token ${name}_LOWER, creating a target
            # literally called "<NAME>_LOWER" which no consumer links.
            if(NOT TARGET ${${name}_LOWER})
                add_library(${${name}_LOWER} INTERFACE IMPORTED)
                set_property(TARGET ${${name}_LOWER} PROPERTY INTERFACE_LINK_LIBRARIES "${${name}_LDFLAGS}")
                set_property(TARGET ${${name}_LOWER} PROPERTY INTERFACE_COMPILE_OPTIONS "${${name}_CFLAGS}")
            endif()
        endif()
    endif()
endfunction()

28
cmake/UseGettext.cmake Normal file
View File

@ -0,0 +1,28 @@
# Ensure the parent directory of the given file exists.
function(_gettext_mkdir_for_file file)
    get_filename_component(parent_dir "${file}" DIRECTORY)
    file(MAKE_DIRECTORY "${parent_dir}")
endfunction()
##
# gettext_compile(<project_name> [SOURCE_DIR <dir>] [MO_FILES_NAME <var>]
#                 [TARGET_NAME <target>])
#
# Compile every .po translation listed in the LINGUAS file into a binary
# .mo catalog under ${CMAKE_BINARY_DIR}/locale and register install rules
# for each catalog.
#   SOURCE_DIR    directory holding LINGUAS and the .po files
#                 (defaults to the current source directory)
#   MO_FILES_NAME name of a caller variable receiving the .mo file list
#   TARGET_NAME   if given, a custom target depending on all .mo files
##
function(gettext_compile project_name)
    cmake_parse_arguments(ARGS "" "MO_FILES_NAME;TARGET_NAME;SOURCE_DIR;PROJECT_NAME" "" ${ARGN})
    if(NOT ARGS_SOURCE_DIR)
        set(ARGS_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
    endif()
    # LINGUAS contains one language code per line.
    file(STRINGS "${ARGS_SOURCE_DIR}/LINGUAS" LINGUAS)
    set(mo_files)
    foreach(lang ${LINGUAS})
        set(po_file ${ARGS_SOURCE_DIR}/${lang}.po)
        set(mo_file ${CMAKE_BINARY_DIR}/locale/${lang}/LC_MESSAGES/${project_name}.mo)
        _gettext_mkdir_for_file(${mo_file})
        list(APPEND mo_files ${mo_file})
        # msgfmt validates format strings while compiling the catalog.
        add_custom_command(OUTPUT ${mo_file} COMMAND ${MSGFMT_EXECUTABLE} --check-format -o ${mo_file} ${po_file} DEPENDS ${po_file})
        install(FILES ${mo_file} DESTINATION ${LOCALE_INSTALL_DIR}/${lang}/LC_MESSAGES)
    endforeach()
    if(ARGS_MO_FILES_NAME)
        set(${ARGS_MO_FILES_NAME} ${mo_files} PARENT_SCOPE)
    endif()
    if(ARGS_TARGET_NAME)
        add_custom_target(${ARGS_TARGET_NAME} DEPENDS ${mo_files})
    endif()
endfunction()

337
cmake/UseVala.cmake Normal file
View File

@ -0,0 +1,337 @@
##
# Compile vala files to their c equivalents for further processing.
#
# The "vala_precompile" function takes care of calling the valac executable on
# the given source to produce c files which can then be processed further using
# default cmake functions.
#
# The first parameter provided is a variable, which will be filled with a list
# of C files generated by the Vala compiler. This list can then be used in
# conjunction with functions like "add_executable" or others to create the
# necessary compile rules with CMake.
#
# The following sections may be specified afterwards to provide certain options
# to the vala compiler:
#
# SOURCES
# A list of .vala files to be compiled. Please take care to add every vala
# file belonging to the currently compiled project or library as Vala will
# otherwise not be able to resolve all dependencies.
#
# PACKAGES
# A list of vala packages/libraries to be used during the compile cycle. The
# package names are exactly the same, as they would be passed to the valac
# "--pkg=" option.
#
# OPTIONS
# A list of optional options to be passed to the valac executable. This can be
# used to pass "--thread" for example to enable multi-threading support.
#
# DEFINITIONS
# A list of symbols to be used for conditional compilation. They are the same
# as they would be passed using the valac "--define=" option.
#
# CUSTOM_VAPIS
# A list of custom vapi files to be included for compilation. This can be
# useful to include freshly created vala libraries without having to install
# them in the system.
#
# GENERATE_VAPI
# Pass all the needed flags to the compiler to create a vapi for
# the compiled library. The provided name will be used for this and a
# <provided_name>.vapi file will be created.
#
# GENERATE_HEADER
# Let the compiler generate a header file for the compiled code. There will
# be a header file as well as an internal header file being generated called
# <provided_name>.h and <provided_name>_internal.h
#
# The following call is a simple example to the vala_precompile macro showing
# an example to every of the optional sections:
#
# find_package(Vala "0.12" REQUIRED)
# include(${VALA_USE_FILE})
#
# vala_precompile(VALA_C
# SOURCES
# source1.vala
# source2.vala
# source3.vala
# PACKAGES
# gtk+-2.0
# gio-1.0
# posix
# DIRECTORY
# gen
# OPTIONS
# --thread
# CUSTOM_VAPIS
# some_vapi.vapi
# GENERATE_VAPI
# myvapi
# GENERATE_HEADER
# myheader
# )
#
# Most important is the variable VALA_C which will contain all the generated c
# file names after the call.
##
##
# Copyright 2009-2010 Jakob Westhoff. All rights reserved.
# Copyright 2010-2011 Daniel Pfeifer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY JAKOB WESTHOFF ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL JAKOB WESTHOFF OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of Jakob Westhoff
##
include(CMakeParseArguments)
# Create the parent directory of the given file if it does not exist yet.
function(_vala_mkdir_for_file file)
    get_filename_component(parent_dir "${file}" DIRECTORY)
    file(MAKE_DIRECTORY "${parent_dir}")
endfunction()
##
# Precompile Vala sources to C (see the header comment above for the full
# description of the keyword arguments).  ${output} receives the list of
# generated .c files in the caller's scope.
##
function(vala_precompile output)
    cmake_parse_arguments(ARGS "FAST_VAPI" "DIRECTORY;GENERATE_HEADER;GENERATE_VAPI;EXPORTS_DIR"
        "SOURCES;PACKAGES;OPTIONS;DEFINITIONS;CUSTOM_VAPIS;CUSTOM_DEPS;GRESOURCES" ${ARGN})
    # Header and internal header are needed to generate the internal vapi.
    if(ARGS_GENERATE_VAPI AND NOT ARGS_GENERATE_HEADER)
        set(ARGS_GENERATE_HEADER ${ARGS_GENERATE_VAPI})
    endif()
    # fast-vapi mode speeds up incremental builds; it is only enabled for the
    # Ninja generator and is incompatible with header generation.
    if("Ninja" STREQUAL ${CMAKE_GENERATOR} AND NOT DISABLE_FAST_VAPI AND NOT ARGS_GENERATE_HEADER)
        set(ARGS_FAST_VAPI true)
    endif()
    if(ARGS_DIRECTORY)
        get_filename_component(DIRECTORY ${ARGS_DIRECTORY} ABSOLUTE)
    else()
        set(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
    endif()
    if(ARGS_EXPORTS_DIR)
        set(ARGS_EXPORTS_DIR ${CMAKE_BINARY_DIR}/${ARGS_EXPORTS_DIR})
    else()
        set(ARGS_EXPORTS_DIR ${CMAKE_BINARY_DIR}/exports)
    endif()
    file(MAKE_DIRECTORY "${ARGS_EXPORTS_DIR}")
    include_directories(${DIRECTORY} ${ARGS_EXPORTS_DIR})
    # Translate PACKAGES and DEFINITIONS into valac command-line options.
    set(vala_pkg_opts "")
    foreach(pkg ${ARGS_PACKAGES})
        list(APPEND vala_pkg_opts "--pkg=${pkg}")
    endforeach()
    set(vala_define_opts "")
    foreach(def ${ARGS_DEFINITIONS})
        list(APPEND vala_define_opts "--define=${def}")
    endforeach()
    # Custom vapis given as relative paths are resolved against the current
    # source directory.
    set(custom_vapi_arguments "")
    if(ARGS_CUSTOM_VAPIS)
        foreach(vapi ${ARGS_CUSTOM_VAPIS})
            if(${vapi} MATCHES ${CMAKE_SOURCE_DIR} OR ${vapi} MATCHES ${CMAKE_BINARY_DIR})
                list(APPEND custom_vapi_arguments ${vapi})
            else()
                list(APPEND custom_vapi_arguments ${CMAKE_CURRENT_SOURCE_DIR}/${vapi})
            endif()
        endforeach()
    endif()
    set(gresources_args "")
    if(ARGS_GRESOURCES)
        set(gresources_args --gresources "${ARGS_GRESOURCES}")
    endif()
    set(in_files "")
    set(fast_vapi_files "")
    set(out_files "")
    set(out_extra_files "")
    set(out_deps_files "")
    set(vapi_arguments "")
    if(ARGS_GENERATE_VAPI)
        list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.vapi")
        list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}_internal.vapi")
        set(vapi_arguments "--vapi=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.vapi" "--internal-vapi=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}_internal.vapi")
        # Emit a .deps file listing the packages the generated vapi needs.
        if(ARGS_PACKAGES)
            string(REPLACE ";" "\\n" pkgs "${ARGS_PACKAGES};${ARGS_CUSTOM_DEPS}")
            add_custom_command(OUTPUT "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.deps" COMMAND echo -e "\"${pkgs}\"" > "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_VAPI}.deps" COMMENT "Generating ${ARGS_GENERATE_VAPI}.deps")
        endif()
    endif()
    set(header_arguments "")
    if(ARGS_GENERATE_HEADER)
        list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}.h")
        list(APPEND out_extra_files "${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}_internal.h")
        list(APPEND header_arguments "--header=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}.h")
        list(APPEND header_arguments "--internal-header=${ARGS_EXPORTS_DIR}/${ARGS_GENERATE_HEADER}_internal.h")
    endif()
    string(REPLACE " " ";" VALAC_FLAGS ${CMAKE_VALA_FLAGS})
    if(VALA_VERSION VERSION_GREATER "0.38")
        set(VALAC_COLORS "--color=always")
    endif()
    if(ARGS_FAST_VAPI)
        # Pass 1: generate a fast vapi for every source file.
        foreach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
            set(in_file "${CMAKE_CURRENT_SOURCE_DIR}/${src}")
            list(APPEND in_files "${in_file}")
            string(REPLACE ".vala" ".c" src ${src})
            string(REPLACE ".gs" ".c" src ${src})
            string(REPLACE ".c" ".vapi" fast_vapi ${src})
            set(fast_vapi_file "${DIRECTORY}/${fast_vapi}")
            list(APPEND fast_vapi_files "${fast_vapi_file}")
            list(APPEND out_files "${DIRECTORY}/${src}")
            _vala_mkdir_for_file("${fast_vapi_file}")
            add_custom_command(OUTPUT ${fast_vapi_file}
                COMMAND
                    ${VALA_EXECUTABLE}
                ARGS
                    ${VALAC_COLORS}
                    --fast-vapi ${fast_vapi_file}
                    ${vala_define_opts}
                    ${ARGS_OPTIONS}
                    ${VALAC_FLAGS}
                    ${in_file}
                DEPENDS
                    ${in_file}
                COMMENT
                    "Generating fast VAPI ${fast_vapi}"
            )
        endforeach()
        # Pass 2: compile each source to C against all *other* fast vapis.
        foreach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
            set(in_file "${CMAKE_CURRENT_SOURCE_DIR}/${src}")
            string(REPLACE ".vala" ".c" c_code ${src})
            string(REPLACE ".gs" ".c" c_code ${c_code})
            string(REPLACE ".c" ".vapi" fast_vapi ${c_code})
            set(my_fast_vapi_file "${DIRECTORY}/${fast_vapi}")
            set(c_code_file "${DIRECTORY}/${c_code}")
            set(fast_vapi_flags "")
            set(fast_vapi_stamp "")
            foreach(fast_vapi_file ${fast_vapi_files})
                if(NOT "${fast_vapi_file}" STREQUAL "${my_fast_vapi_file}")
                    list(APPEND fast_vapi_flags --use-fast-vapi "${fast_vapi_file}")
                    list(APPEND fast_vapi_stamp "${fast_vapi_file}")
                endif()
            endforeach()
            # BUGFIX: create the output directory for the generated C file.
            # The original passed ${fast_vapi_file}, the stale loop variable
            # left over from the foreach above.
            _vala_mkdir_for_file("${c_code_file}")
            get_filename_component(dir "${c_code_file}" DIRECTORY)
            add_custom_command(OUTPUT ${c_code_file}
                COMMAND
                    ${VALA_EXECUTABLE}
                ARGS
                    ${VALAC_COLORS}
                    "-C"
                    "-d" ${dir}
                    ${vala_pkg_opts}
                    ${vala_define_opts}
                    ${gresources_args}
                    ${ARGS_OPTIONS}
                    ${VALAC_FLAGS}
                    ${fast_vapi_flags}
                    ${in_file}
                    ${custom_vapi_arguments}
                DEPENDS
                    ${fast_vapi_stamp}
                    ${in_file}
                    ${ARGS_CUSTOM_VAPIS}
                    ${ARGS_GRESOURCES}
                COMMENT
                    "Generating C source ${c_code}"
            )
        endforeach()
        # Extra outputs (vapi/header) require a separate full valac run.
        if(NOT "${out_extra_files}" STREQUAL "")
            add_custom_command(OUTPUT ${out_extra_files}
                COMMAND
                    ${VALA_EXECUTABLE}
                ARGS
                    ${VALAC_COLORS}
                    -C -q --disable-warnings
                    ${header_arguments}
                    ${vapi_arguments}
                    "-b" ${CMAKE_CURRENT_SOURCE_DIR}
                    "-d" ${DIRECTORY}
                    ${vala_pkg_opts}
                    ${vala_define_opts}
                    ${gresources_args}
                    ${ARGS_OPTIONS}
                    ${VALAC_FLAGS}
                    ${in_files}
                    ${custom_vapi_arguments}
                DEPENDS
                    ${in_files}
                    ${ARGS_CUSTOM_VAPIS}
                    ${ARGS_GRESOURCES}
                COMMENT
                    "Generating VAPI and headers for target ${output}"
            )
        endif()
    else()
        # Single valac invocation producing all C files (and extras) at once.
        foreach(src ${ARGS_SOURCES} ${ARGS_UNPARSED_ARGUMENTS})
            set(in_file "${CMAKE_CURRENT_SOURCE_DIR}/${src}")
            list(APPEND in_files "${in_file}")
            string(REPLACE ".vala" ".c" src ${src})
            string(REPLACE ".gs" ".c" src ${src})
            list(APPEND out_files "${DIRECTORY}/${src}")
            # BUGFIX: create the directory for the generated C file; the
            # original referenced ${fast_vapi_file}, which is never set in
            # this branch.
            _vala_mkdir_for_file("${DIRECTORY}/${src}")
        endforeach()
        add_custom_command(OUTPUT ${out_files} ${out_extra_files}
            COMMAND
                ${VALA_EXECUTABLE}
            ARGS
                ${VALAC_COLORS}
                -C
                ${header_arguments}
                ${vapi_arguments}
                "-b" ${CMAKE_CURRENT_SOURCE_DIR}
                "-d" ${DIRECTORY}
                ${vala_pkg_opts}
                ${vala_define_opts}
                ${gresources_args}
                ${ARGS_OPTIONS}
                ${VALAC_FLAGS}
                ${in_files}
                ${custom_vapi_arguments}
            DEPENDS
                ${in_files}
                ${ARGS_CUSTOM_VAPIS}
                ${ARGS_GRESOURCES}
            COMMENT
                "Generating C code for target ${output}"
        )
    endif()
    set(${output} ${out_files} PARENT_SCOPE)
endfunction()

View File

@ -0,0 +1,21 @@
# Uninstall script template (configured by CMake): removes every file
# recorded in install_manifest.txt, honouring a DESTDIR staging prefix.
if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
    message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
endif()
file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
# The manifest lists one installed path per line; turn it into a CMake list.
string(REGEX REPLACE "\n" ";" files "${files}")
foreach(file ${files})
    message(STATUS "Uninstalling: $ENV{DESTDIR}${file}")
    if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
        # execute_process replaces the long-deprecated exec_program(); the
        # command and its argument are now passed as separate list items.
        execute_process(
            COMMAND "@CMAKE_COMMAND@" -E remove "$ENV{DESTDIR}${file}"
            OUTPUT_VARIABLE rm_out
            RESULT_VARIABLE rm_retval
        )
        if(NOT "${rm_retval}" STREQUAL 0)
            message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
        endif()
    else()
        message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
    endif()
endforeach()

294
configure vendored Executable file
View File

@ -0,0 +1,294 @@
#!/bin/sh
# --- Command-line parsing -----------------------------------------------
# getopt validates the options; every long option handled by the case
# statement below must also appear in this list, or getopt rejects it.
# BUGFIX: valac: and valac-flags: were missing here, which made --valac and
# --valac-flags unusable; the stray trailing comma after infodir: is gone.
OPTS=`getopt -o "h" --long \
help,fetch-only,no-debug,disable-fast-vapi,with-tests,release,with-libsignal-in-tree,\
enable-plugin:,disable-plugin:,\
valac:,valac-flags:,\
prefix:,program-prefix:,exec-prefix:,lib-suffix:,\
bindir:,libdir:,includedir:,datadir:,\
host:,build:,\
sbindir:,sysconfdir:,libexecdir:,localstatedir:,sharedstatedir:,mandir:,infodir: \
 -n './configure' -- "$@"`
if [ $? != 0 ] ; then echo "-- Ignoring unrecognized options." >&2 ; fi
eval set -- "$OPTS"

# Defaults; most are overridden by the option loop below.  PREFIX also
# honours a value pre-set in the environment.
PREFIX=${PREFIX:-/usr/local}
ENABLED_PLUGINS=
DISABLED_PLUGINS=
BUILD_LIBSIGNAL_IN_TREE=
BUILD_TESTS=
BUILD_TYPE=Debug
DISABLE_FAST_VAPI=
LIB_SUFFIX=
NO_DEBUG=
FETCH_ONLY=
EXEC_PREFIX=
BINDIR=
# BUGFIX: was "SBINDIR=n", an apparent typo; like its siblings the
# variable defaults to empty.
SBINDIR=
SYSCONFDIR=
DATADIR=
INCLUDEDIR=
LIBDIR=
LIBEXECDIR=
LOCALSTATEDIR=
SHAREDSTATEDIR=
MANDIR=
INFODIR=
# Print ./configure usage text to stdout.  The body is a single heredoc;
# keep any new option documentation inside it so it is actually printed.
help() {
cat << EOF
Usage:
  ./configure [OPTION]...
Defaults for the options (based on current environment) are specified in
brackets.
Configuration:
  -h, --help                 Print this help and exit
  --disable-fast-vapi        Disable the usage of Vala compilers fast-vapi
                             feature. fast-vapi mode is slower when doing
                             clean builds, but faster when doing incremental
                             builds (during development).
  --fetch-only               Only fetch the files required to run ./configure
                             without network access later and exit.
  --no-debug                 Build without debug symbols
  --release                  Configure to build an optimized release version
  --with-libsignal-in-tree   Build libsignal-protocol-c in tree and link it
                             statically.
  --with-tests               Also build tests.
Plugin configuration:
  --enable-plugin=PLUGIN     Enable compilation of plugin PLUGIN.
  --disable-plugin=PLUGIN    Disable compilation of plugin PLUGIN.
Installation directories:
  --prefix=PREFIX            Install architecture-independent files in PREFIX
                             [$PREFIX]
  --program-prefix=PREFIX    Same as --prefix
  --exec-prefix=             Install architecture-dependent files in EPREFIX
                             [PREFIX]
  --lib-suffix=SUFFIX        Append SUFFIX to the directory name for libraries
By default, \`make install' will install all the files in
\`/usr/local/bin', \`/usr/local/lib' etc.  You can specify
an installation prefix other than \`/usr/local' using \`--prefix',
for instance \`--prefix=\$HOME'.
For better control, use the options below.
Fine tuning of the installation directories:
  --bindir=DIR               user executables [EPREFIX/bin]
  --libdir=DIR               object code libraries [EPREFIX/lib]
  --includedir=DIR           C header files [PREFIX/include]
  --datadir=DIR              read-only data [PREFIX/share]
For compatibility with autotools, these options will be silently ignored:
  --host, --build, --sbindir, --sysconfdir, --libexecdir, --sharedstatedir,
  --localstatedir, --mandir, --infodir
Some influential environment variables:
  CC                         C compiler command
  CFLAGS                     C compiler flags
  PKG_CONFIG_PATH            directories to add to pkg-config's search path
  PKG_CONFIG_LIBDIR          path overriding pkg-config's built-in search path
  USE_CCACHE                 decide to use ccache when compiling C objects
  VALAC                      Vala compiler command
  VALACFLAGS                 Vala compiler flags
Use these variables to override the choices made by \`configure' or to help
it to find libraries and programs with nonstandard names/locations.
EOF
}
# Apply the parsed options.  Each branch consumes the flag (and its
# argument, when it takes one) from the positional parameters.
while true; do
case "$1" in
--prefix ) PREFIX="$2"; shift; shift ;;
--enable-plugin ) if [ -z "$ENABLED_PLUGINS" ]; then ENABLED_PLUGINS="$2"; else ENABLED_PLUGINS="$ENABLED_PLUGINS;$2"; fi; shift; shift ;;
--disable-plugin ) if [ -z "$DISABLED_PLUGINS" ]; then DISABLED_PLUGINS="$2"; else DISABLED_PLUGINS="$DISABLED_PLUGINS;$2"; fi; shift; shift ;;
# NOTE(review): these store into VALA_EXECUTABLE/VALAC_FLAGS, but the cmake
# invocation further down reads $VALAC and $VALACFLAGS — confirm which pair
# of variable names is intended.
--valac ) VALA_EXECUTABLE="$2"; shift; shift ;;
--valac-flags ) VALAC_FLAGS="$2"; shift; shift ;;
--lib-suffix ) LIB_SUFFIX="$2"; shift; shift ;;
--with-libsignal-in-tree ) BUILD_LIBSIGNAL_IN_TREE=yes; shift ;;
--disable-fast-vapi ) DISABLE_FAST_VAPI=yes; shift ;;
--no-debug ) NO_DEBUG=yes; shift ;;
--fetch-only ) FETCH_ONLY=yes; shift ;;
--release ) BUILD_TYPE=RelWithDebInfo; shift ;;
--with-tests ) BUILD_TESTS=yes; shift ;;
# Autotools paths
--program-prefix ) PREFIX="$2"; shift; shift ;;
--exec-prefix ) EXEC_PREFIX="$2"; shift; shift ;;
--bindir ) BINDIR="$2"; shift; shift ;;
--datadir ) DATADIR="$2"; shift; shift ;;
--includedir ) INCLUDEDIR="$2"; shift; shift ;;
--libdir ) LIBDIR="$2"; shift; shift ;;
# Autotools paths not used
--sbindir ) SBINDIR="$2"; shift; shift ;;
--sysconfdir ) SYSCONFDIR="$2"; shift; shift ;;
--libexecdir ) LIBEXECDIR="$2"; shift; shift ;;
--localstatedir ) LOCALSTATEDIR="$2"; shift; shift ;;
--sharedstatedir ) SHAREDSTATEDIR="$2"; shift; shift ;;
--mandir ) MANDIR="$2"; shift; shift ;;
--infodir ) INFODIR="$2"; shift; shift ;;
--host | --build ) shift; shift ;;
-h | --help ) help; exit 0 ;;
-- ) shift; break ;;
* ) break ;;
esac
done
# Ensure vendored submodules are present: inside a git checkout this is a
# plain "git submodule update"; for tarball builds .gitmodules is parsed by
# hand and each missing submodule is cloned directly.
if [ "$BUILD_LIBSIGNAL_IN_TREE" = "yes" ] || [ "$FETCH_ONLY" = "yes" ]; then
if [ -d ".git" ]; then
git submodule update --init 2>/dev/null
else
# Walk the line numbers of the [submodule] headers (plus the file's last
# line); each (tmp, i] slice then holds one submodule's key=value settings.
tmp=0
for i in $(cat .gitmodules | grep -n submodule | awk -F ':' '{print $1}') $(wc -l .gitmodules | awk '{print $1}'); do
if ! [ $tmp -eq 0 ]; then
name=$(cat .gitmodules | head -n $tmp | tail -n 1 | awk -F '"' '{print $2}')
def=$(cat .gitmodules | head -n $i | tail -n $(expr "$i" - "$tmp") | awk -F ' ' '{print $1 $2 $3}')
path=$(echo "$def" | grep '^path=' | awk -F '=' '{print $2}')
url=$(echo "$def" | grep '^url=' | awk -F '=' '{print $2}')
branch=$(echo "$def" | grep '^branch=' | awk -F '=' '{print $2}')
# Only clone when the submodule directory is missing or empty.
if ! ls "$path"/* >/dev/null 2>/dev/null; then
git=$(which git)
if ! [ $? -eq 0 ] || ! [ -x $git ]; then
echo "Failed retrieving missing files"
exit 5
fi
res=$(git clone "$url" "$path" 2>&1)
if ! [ $? -eq 0 ] || ! [ -d $path ]; then
echo "Failed retrieving missing files: $res"
exit 5
fi
# Check out the configured branch, if one was given.
if [ -n "$branch" ]; then
olddir="$(pwd)"
cd "$path"
res=$(git checkout "$branch" 2>&1)
if ! [ $? -eq 0 ]; then
echo "Failed retrieving missing files: $res"
exit 5
fi
cd "$olddir"
fi
echo "Submodule path '$path': checked out '$branch' (via git clone)"
fi
fi
tmp=$i
done
fi
fi
if [ "$FETCH_ONLY" = "yes" ]; then exit 0; fi
# CMake is required to generate the build system.
if [ ! -x "$(which cmake 2>/dev/null)" ]
then
echo "-!- CMake required."
exit 1
fi
# Prefer Ninja (packaged as ninja-build on some distributions) over Make.
ninja_bin="$(which ninja-build 2>/dev/null)"
if ! [ -x "$ninja_bin" ]; then
ninja_bin="$(which ninja 2>/dev/null)"
fi
if [ -x "$ninja_bin" ]; then
ninja_version=`$ninja_bin --version 2>/dev/null`
if [ $? -eq 0 ]; then
# Only announce Ninja when its version changed since the last configure run.
if [ -d build ]; then
last_ninja_version=`cat build/.ninja_version 2>/dev/null`
else
last_ninja_version=0
fi
if [ "$ninja_version" != "$last_ninja_version" ]; then
echo "-- Found Ninja: $ninja_bin (found version \"$ninja_version\")"
fi
cmake_type="Ninja"
exec_bin="$ninja_bin"
exec_command="$exec_bin"
elif [ "/usr/sbin/ninja" = "$ninja_bin" ]; then
# On some systems /usr/sbin/ninja is an unrelated binary that fails --version.
echo "-- Ninja at $ninja_bin is not usable. Did you install 'ninja' instead of 'ninja-build'?"
fi
fi
# Fall back to Make when no usable Ninja was found.
if ! [ -x "$exec_bin" ]; then
make_bin="$(which make 2>/dev/null)"
if [ -x "$make_bin" ]; then
echo "-- Found Make: $make_bin"
cmake_type="Unix Makefiles"
exec_bin="$make_bin"
exec_command="$exec_bin"
echo "-- Running with make. Using Ninja (ninja-build) might improve build experience."
fi
fi
if ! [ -x "$exec_bin" ]; then
echo "-!- No compatible build system (Ninja, Make) found."
exit 4
fi
# A regular file named "build" would shadow the build directory.
if [ -f ./build ]; then
echo "-!- ./build file exists. ./configure can't continue"
exit 2
fi
# Wipe CMake's cache when switching between generators.
if [ -d build ]; then
last_type=`cat build/.cmake_type`
if [ "$cmake_type" != "$last_type" ]
then
echo "-- Using different build system, cleaning build system files"
cd build
rm -r CMakeCache.txt CMakeFiles
cd ..
fi
fi
mkdir -p build
cd build
echo "$cmake_type" > .cmake_type
echo "$ninja_version" > .ninja_version
# Hand all collected settings to CMake; empty values let the CMake side
# pick its own defaults.
cmake -G "$cmake_type" \
    -DCMAKE_INSTALL_PREFIX="$PREFIX" \
    -DCMAKE_BUILD_TYPE="$BUILD_TYPE" \
    -DENABLED_PLUGINS="$ENABLED_PLUGINS" \
    -DDISABLED_PLUGINS="$DISABLED_PLUGINS" \
    -DBUILD_TESTS="$BUILD_TESTS" \
    -DBUILD_LIBSIGNAL_IN_TREE="$BUILD_LIBSIGNAL_IN_TREE" \
    -DVALA_EXECUTABLE="$VALAC" \
    -DCMAKE_VALA_FLAGS="$VALACFLAGS" \
    -DDISABLE_FAST_VAPI="$DISABLE_FAST_VAPI" \
    -DLIB_SUFFIX="$LIB_SUFFIX" \
    -DNO_DEBUG="$NO_DEBUG" \
    -DEXEC_INSTALL_PREFIX="$EXEC_PREFIX" \
    -DSHARE_INSTALL_PREFIX="$DATADIR" \
    -DBIN_INSTALL_DIR="$BINDIR" \
    -DINCLUDE_INSTALL_DIR="$INCLUDEDIR" \
    -DLIB_INSTALL_DIR="$LIBDIR" \
    -Wno-dev \
    .. || exit 9
# Write wrapper Makefiles so plain "make" drives the chosen generator.
# NOTE(review): make recipe lines normally require leading tabs — confirm
# the repository version of these heredocs carries tabs (they may have been
# stripped in this rendering).
if [ "$cmake_type" = "Ninja" ]; then
cat << EOF > Makefile
default:
@sh -c "$exec_command"
%:
@sh -c "$exec_command \"\$@\""
EOF
fi
cd ..
cat << EOF > Makefile
default:
@sh -c "cd build; $exec_command"
distclean: clean uninstall
test: default
echo "make test not yet supported"
%:
@sh -c "cd build; $exec_command \"\$@\""
EOF
echo "-- Configured. Type 'make' to build, 'make install' to install."

View File

@ -0,0 +1,41 @@
# Native dependencies, located via the project's Find modules.
find_package(GCrypt REQUIRED)
find_package(Srtp2 REQUIRED)
# find_packages() is a project-local helper that collects the GLib-family
# package list into CRYPTO_VALA_PACKAGES.
find_packages(CRYPTO_VALA_PACKAGES REQUIRED
GLib
GObject
GIO
)
# Precompile the Vala sources to C; also emits crypto-vala.{vapi,h} (plus
# the internal variants) into ${CMAKE_BINARY_DIR}/exports.
vala_precompile(CRYPTO_VALA_C
SOURCES
"src/cipher.vala"
"src/cipher_converter.vala"
"src/error.vala"
"src/random.vala"
"src/srtp.vala"
CUSTOM_VAPIS
"${CMAKE_CURRENT_SOURCE_DIR}/vapi/gcrypt.vapi"
"${CMAKE_CURRENT_SOURCE_DIR}/vapi/libsrtp2.vapi"
PACKAGES
${CRYPTO_VALA_PACKAGES}
GENERATE_VAPI
crypto-vala
GENERATE_HEADER
crypto-vala
)
# Convenience target other components depend on to obtain the vapi files.
add_custom_target(crypto-vala-vapi
DEPENDS
${CMAKE_BINARY_DIR}/exports/crypto-vala.vapi
${CMAKE_BINARY_DIR}/exports/crypto-vala.deps
)
# G_LOG_DOMAIN tags this library's GLib log output.
add_definitions(${VALA_CFLAGS} -DG_LOG_DOMAIN="crypto-vala")
add_library(crypto-vala SHARED ${CRYPTO_VALA_C})
add_dependencies(crypto-vala crypto-vala-vapi)
# gcrypt and libsrtp2 are the imported targets created by the Find modules.
target_link_libraries(crypto-vala ${CRYPTO_VALA_PACKAGES} gcrypt libsrtp2)
set_target_properties(crypto-vala PROPERTIES VERSION 0.0 SOVERSION 0)
# Install the library plus its generated vapi/deps/header files.
install(TARGETS crypto-vala ${TARGET_INSTALL})
install(FILES ${CMAKE_BINARY_DIR}/exports/crypto-vala.vapi ${CMAKE_BINARY_DIR}/exports/crypto-vala.deps DESTINATION ${VAPI_INSTALL_DIR})
install(FILES ${CMAKE_BINARY_DIR}/exports/crypto-vala.h DESTINATION ${INCLUDE_INSTALL_DIR})

View File

@ -1,2 +0,0 @@
gio-2.0
glib-2.0

View File

@ -1,23 +0,0 @@
# Dependency objects resolved in the top-level meson.build.
dependencies = [
dep_gio,
dep_glib,
dep_libgcrypt,
dep_libsrtp2,
]
sources = files(
'src/cipher.vala',
'src/cipher_converter.vala',
'src/error.vala',
'src/random.vala',
'src/srtp.vala',
)
# G_LOG_DOMAIN tags this library's GLib log output.
c_args = [
'-DG_LOG_DOMAIN="crypto-vala"',
]
# Make the bundled gcrypt/libsrtp2 vapi bindings visible to valac.
vala_args = [
'--vapidir', meson.current_source_dir() / 'vapi',
]
lib_crypto_vala = library('crypto-vala', sources, c_args: c_args, vala_args: vala_args, dependencies: dependencies, version: '0.0', install: true, install_dir: [true, true, true], install_rpath: default_install_rpath)
dep_crypto_vala = declare_dependency(link_with: lib_crypto_vala, include_directories: include_directories('.'))
install_data('crypto-vala.deps', install_dir: get_option('datadir') / 'vala/vapi', install_tag: 'devel') # TODO: workaround for https://github.com/mesonbuild/meson/issues/9756

View File

@ -55,7 +55,7 @@ public class SymmetricCipherEncrypter : SymmetricCipherConverter {
}
return ConverterResult.CONVERTED;
} catch (Crypto.Error e) {
throw new IOError.FAILED(@"$(e.domain) error while encrypting: $(e.message)");
throw new IOError.FAILED(@"$(e.domain) error while decrypting: $(e.message)");
}
}
}

274
dino.doap
View File

@ -3,7 +3,7 @@
<Project>
<name>Dino</name>
<short-name>dino</short-name>
<shortdesc xml:lang="en">Modern XMPP chat client</shortdesc>
<shortdesc xml:lang="en">Modern XMPP Chat Client</shortdesc>
<shortdesc xml:lang="zh-TW">現代化的 XMPP 用戶端聊天軟件</shortdesc>
<shortdesc xml:lang="zh-CN">现代 XMPP 聊天客户端</shortdesc>
<shortdesc xml:lang="tr">Modern XMPP Sohbet İstemcisi</shortdesc>
@ -227,51 +227,40 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0004.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0027.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0030.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0045.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0047.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:note>For use with XEP-0261</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0048.html"/>
<xmpp:status>deprecated</xmpp:status>
<xmpp:note>Migrating to XEP-0402 if supported by server</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0049.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -279,179 +268,119 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0054.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for viewing avatars</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0059.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0313</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0060.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0065.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0260</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0066.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:note>For file transfers using XEP-0363</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0077.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0082.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0084.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0085.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0115.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0153.html"/>
<xmpp:status>deprecated</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Only to fetch Avatars from other users</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0163.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:status>partial</xmpp:status>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0166.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0167.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0176.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0177.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0184.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0191.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:status>partial</xmpp:status>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0198.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0199.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0203.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0215.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0222.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0223.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0234.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -459,84 +388,43 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0245.html"/>
<xmpp:version>1.0</xmpp:version>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0249.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for sending</xmpp:note>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0260.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.3</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0261.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0272.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0280.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0293.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0294.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0297.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0298.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0308.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.2.0</xmpp:version>
<xmpp:since>0.2</xmpp:since>
</xmpp:SupportedXep>
@ -545,67 +433,45 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0313.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Not for MUCs</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0320.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0333.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0334.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0353.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.1</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0359.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0363.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0367.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0368.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -613,160 +479,28 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0380.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for outgoing messages</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0384.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0391.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0392.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0393.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0394.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0396.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0398.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0402.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.2.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0410.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.2</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0421.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0426.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0428.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.1</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0444.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.1</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0446.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0447.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0453.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0454.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for embedded thumbnails</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0461.html"/>
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0482.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0486.html"/>
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
</Project>

View File

@ -9,7 +9,7 @@
<name>Dino</name>
<short-name>dino</short-name>
<shortdesc xml:lang="en">Modern XMPP chat client</shortdesc>
<shortdesc xml:lang="en">Modern XMPP Chat Client</shortdesc>
<description xml:lang="en">
Dino is a modern open-source chat client for the desktop. It focuses on providing a clean and reliable Jabber/XMPP experience while having your privacy in mind.
It supports end-to-end encryption with OMEMO and OpenPGP and allows configuring privacy-related features such as read receipts and typing notifications.
@ -47,51 +47,40 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0004.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0027.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0030.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0045.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0047.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:note>For use with XEP-0261</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0048.html" />
<xmpp:status>deprecated</xmpp:status>
<xmpp:note>Migrating to XEP-0402 if supported by server</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0049.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -99,179 +88,119 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0054.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for viewing avatars</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0059.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0313</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0060.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0065.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>For use with XEP-0260</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0066.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:note>For file transfers using XEP-0363</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0077.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0082.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0084.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0085.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0115.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0153.html" />
<xmpp:status>deprecated</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Only to fetch Avatars from other users</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0163.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:status>partial</xmpp:status>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0166.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0167.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0176.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0177.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0184.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0191.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:status>partial</xmpp:status>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0198.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0199.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0203.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0215.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0222.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0223.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0234.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -279,84 +208,43 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0245.html" />
<xmpp:version>1.0</xmpp:version>
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0249.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for sending</xmpp:note>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0260.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.3</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0261.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0272.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0280.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0293.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0294.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.2</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0297.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0298.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0308.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.2.0</xmpp:version>
<xmpp:since>0.2</xmpp:since>
</xmpp:SupportedXep>
@ -365,67 +253,45 @@
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0313.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
<xmpp:note>Not for MUCs</xmpp:note>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0320.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0333.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0334.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0353.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.1</xmpp:version>
<xmpp:since>0.3</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0359.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0363.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0367.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0368.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.1.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
@ -433,160 +299,28 @@
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0380.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>Only for outgoing messages</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0384.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0391.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0392.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.0.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0393.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>1.1.1</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0394.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0396.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0398.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0402.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>1.2.0</xmpp:version>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0410.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:since>0.2</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0421.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0426.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0428.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.1</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0444.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.1</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0446.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0447.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.3.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0453.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.1.2</xmpp:version>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0454.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:note>No support for embedded thumbnails</xmpp:note>
<xmpp:since>0.1</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0461.html" />
<xmpp:status>complete</xmpp:status>
<xmpp:version>0.2.0</xmpp:version>
<xmpp:since>0.4</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0482.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
<xmpp:since>0.5</xmpp:since>
</xmpp:SupportedXep>
</implements>
<implements>
<xmpp:SupportedXep>
<xmpp:xep rdf:resource="https://xmpp.org/extensions/xep-0486.html" />
<xmpp:status>partial</xmpp:status>
<xmpp:version>0.1.0</xmpp:version>
</xmpp:SupportedXep>
</implements>
</Project>

View File

@ -1,133 +0,0 @@
{
"id": "im.dino.Dino",
"runtime": "org.gnome.Platform",
"runtime-version": "48",
"sdk": "org.gnome.Sdk",
"command": "dino",
"finish-args": [
"--share=ipc",
"--socket=fallback-x11",
"--socket=wayland",
"--socket=pulseaudio",
"--socket=gpg-agent",
"--filesystem=xdg-run/pipewire-0",
"--share=network",
"--device=dri",
"--talk-name=org.freedesktop.Notifications"
],
"modules": [
{
"name": "protobuf",
"buildsystem": "cmake-ninja",
"cleanup": [
"*"
],
"config-opts": [
"-Dprotobuf_BUILD_TESTS=OFF",
"-Dprotobuf_BUILD_LIBUPB=OFF"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/protocolbuffers/protobuf/releases/download/v30.2/protobuf-30.2.tar.gz",
"sha512": "555d1b18d175eeaf17f3879f124d33080f490367840d35b34bfc4e4a5b383bf6a1d09f1570acb6af9c53ac4940a14572d46423b6e3dd0c712e7802c986fb6be6",
"x-checker-data": {
"type": "anitya",
"project-id": 3715,
"stable-only": true,
"url-template": "https://github.com/protocolbuffers/protobuf/releases/download/v$version/protobuf-$version.tar.gz"
}
}
]
},
{
"name": "libprotobuf-c",
"buildsystem": "autotools",
"config-opts": [
"CFLAGS=-fPIC"
],
"post-install": [
"rm /app/lib/*.so"
],
"cleanup": [
"*"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/protobuf-c/protobuf-c/releases/download/v1.5.2/protobuf-c-1.5.2.tar.gz",
"sha512": "78dc72988d7e8232c1b967849aa00939bc05ab7d39b86a8e2af005e38aa4ef4c9b03920d51fb5337399d980e65f35d11bd4742bea745a893ecc909f56a51c9ac",
"x-checker-data": {
"type": "anitya",
"project-id": 3716,
"stable-only": true,
"url-template": "https://github.com/protobuf-c/protobuf-c/releases/download/v$version/protobuf-c-$version.tar.gz"
}
}
]
},
{
"name": "libomemo-c",
"buildsystem": "meson",
"cleanup": [
"/lib/pkgconfig",
"/include"
],
"config-opts": [
"-Dtests=false",
"-Ddefault_library=static"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/dino/libomemo-c/releases/download/v0.5.1/libomemo-c-0.5.1.tar.gz",
"sha512": "ff59565406c51663f2944e9a7c12c5b0e3fa01073039f5161472dd81f59194b1cf2685bc1e0cc930a141bc409b965c5d93313cfc3e0e237250102af3b5e88826",
"x-checker-data": {
"type": "anitya",
"project-id": 359676,
"stable-only": true,
"url-template": "https://github.com/dino/libomemo-c/releases/download/v$version/libomemo-c-$version.tar.gz"
}
}
]
},
{
"name": "qrencode",
"buildsystem": "cmake-ninja",
"cleanup": [
"*"
],
"config-opts": [
"-DCMAKE_C_FLAGS=-fPIC"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/fukuchi/libqrencode/archive/refs/tags/v4.1.1.tar.gz",
"sha512": "584106e7bcaaa1ef2efe63d653daad38d4ff436eb4b185a1db3c747169c1ffa74149c3b1329bb0b8ae007903db0a7034aabf135cc196d91a37b5c61348154a65",
"x-checker-data": {
"type": "anitya",
"project-id": 12834,
"stable-only": true,
"url-template": "https://github.com/fukuchi/libqrencode/archive/refs/tags/v$version.tar.gz"
}
}
]
},
{
"name": "dino",
"buildsystem": "meson",
"builddir": true,
"cleanup": [
"/include",
"/share/vala"
],
"sources": [
{
"type": "dir",
"path": "."
}
]
}
]
}

121
libdino/CMakeLists.txt Normal file
View File

@ -0,0 +1,121 @@
# Resolve the external libraries libdino needs; the discovered package list is
# stored in LIBDINO_PACKAGES and reused both by vala_precompile() (as --pkg
# arguments) and by target_link_libraries() below.
# NOTE(review): find_packages() is a project-provided macro (not core CMake's
# find_package()) — presumably defined in the top-level cmake modules; confirm
# it is included before this directory is processed.
find_packages(LIBDINO_PACKAGES REQUIRED
GDKPixbuf2
Gee
GLib
GModule
GObject
)
# Precompile the libdino Vala sources to C. The generated C file list is
# returned in LIBDINO_VALA_C; the call also emits dino.vapi / dino.h into the
# build tree's exports directory (GENERATE_VAPI / GENERATE_HEADER below).
# NOTE(review): vala_precompile() is a project-provided macro — the exact
# semantics of CUSTOM_VAPIS/CUSTOM_DEPS come from it, not from core CMake.
vala_precompile(LIBDINO_VALA_C
# Vala sources making up the libdino shared library.
SOURCES
src/application.vala
src/dbus/login1.vala
src/dbus/notifications.vala
src/dbus/upower.vala
src/entity/account.vala
src/entity/call.vala
src/entity/conversation.vala
src/entity/encryption.vala
src/entity/file_transfer.vala
src/entity/message.vala
src/entity/settings.vala
src/plugin/interfaces.vala
src/plugin/loader.vala
src/plugin/registry.vala
src/service/avatar_manager.vala
src/service/blocking_manager.vala
src/service/call_store.vala
src/service/call_state.vala
src/service/call_peer_state.vala
src/service/calls.vala
src/service/chat_interaction.vala
src/service/connection_manager.vala
src/service/content_item_store.vala
src/service/conversation_manager.vala
src/service/counterpart_interaction_manager.vala
src/service/database.vala
src/service/entity_capabilities_storage.vala
src/service/entity_info.vala
src/service/file_manager.vala
src/service/file_transfer_storage.vala
src/service/jingle_file_transfers.vala
src/service/message_correction.vala
src/service/message_processor.vala
src/service/message_storage.vala
src/service/module_manager.vala
src/service/muc_manager.vala
src/service/notification_events.vala
src/service/presence_manager.vala
src/service/registration.vala
src/service/roster_manager.vala
src/service/search_processor.vala
src/service/stream_interactor.vala
src/service/util.vala
src/util/display_name.vala
src/util/util.vala
src/util/weak_map.vala
# Vapi files generated earlier in this build (not installed system-wide).
CUSTOM_VAPIS
"${CMAKE_BINARY_DIR}/exports/xmpp-vala.vapi"
"${CMAKE_BINARY_DIR}/exports/qlite.vapi"
CUSTOM_DEPS
xmpp-vala
qlite
# Regular --pkg packages, resolved by find_packages() above.
PACKAGES
${LIBDINO_PACKAGES}
# Emit exports/dino.vapi and exports/dino.h for plugins/consumers.
GENERATE_VAPI
dino
GENERATE_HEADER
dino
)
# Copy the hand-written i18n header next to the generated headers in
# ${CMAKE_BINARY_DIR}/exports, so dino.h and dino_i18n.h live side by side.
# Uses `cmake -E copy` instead of the POSIX-only `cp` so the rule also works
# on Windows hosts; VERBATIM guarantees platform-correct argument escaping.
add_custom_command(OUTPUT "${CMAKE_BINARY_DIR}/exports/dino_i18n.h"
    COMMAND
        ${CMAKE_COMMAND} -E copy "${CMAKE_CURRENT_SOURCE_DIR}/src/dino_i18n.h" "${CMAKE_BINARY_DIR}/exports/dino_i18n.h"
    DEPENDS
        "${CMAKE_CURRENT_SOURCE_DIR}/src/dino_i18n.h"
    COMMENT
        "Copy header file dino_i18n.h"
    VERBATIM
)
# Pseudo-target that other targets depend on to guarantee the generated
# vapi/deps files and the copied i18n header exist before they are consumed.
# Paths are quoted so a build directory containing spaces does not split a
# single dependency into several bogus arguments.
add_custom_target(dino-vapi
    DEPENDS
        "${CMAKE_BINARY_DIR}/exports/dino.vapi"
        "${CMAKE_BINARY_DIR}/exports/dino.deps"
        "${CMAKE_BINARY_DIR}/exports/dino_i18n.h"
)
# Compiler flags for the valac-generated C plus install-path/log-domain
# defines baked into the library.
# NOTE(review): add_definitions() is directory-scoped legacy style; kept
# because the Vala toolchain macros in this tree rely on directory-wide flags
# (the BUILD_TESTS block below repeats it for the test target) — new flags
# should prefer target_compile_options()/target_compile_definitions().
add_definitions(${VALA_CFLAGS} -DDINO_SYSTEM_PLUGIN_DIR="${PLUGIN_INSTALL_DIR}" -DDINO_SYSTEM_LIBDIR_NAME="${LIBDIR_NAME}" -DG_LOG_DOMAIN="libdino")
add_library(libdino SHARED ${LIBDINO_VALA_C} ${CMAKE_BINARY_DIR}/exports/dino_i18n.h)
# Make sure the exported vapi/header files are produced before libdino builds.
add_dependencies(libdino dino-vapi)
# PUBLIC: the generated dino.h/dino.vapi expose xmpp-vala, qlite and the GLib
# stack to consumers, so their usage requirements must propagate. The previous
# keyword-less legacy signature had the same propagation but was ambiguous.
target_link_libraries(libdino PUBLIC xmpp-vala qlite ${LIBDINO_PACKAGES} m)
# PREFIX "" because the target name already carries the "lib" prefix.
set_target_properties(libdino PROPERTIES PREFIX "" VERSION 0.0 SOVERSION 0)
install(TARGETS libdino ${TARGET_INSTALL})
install(FILES ${CMAKE_BINARY_DIR}/exports/dino.vapi ${CMAKE_BINARY_DIR}/exports/dino.deps DESTINATION ${VAPI_INSTALL_DIR})
install(FILES ${CMAKE_BINARY_DIR}/exports/dino.h ${CMAKE_BINARY_DIR}/exports/dino_i18n.h DESTINATION ${INCLUDE_INSTALL_DIR})
# Optional test binary; enabled with -DBUILD_TESTS=ON.
if(BUILD_TESTS)
    # Precompile the test sources against the *internal* vapi so the tests can
    # reach symbols that are not part of the exported libdino interface.
    vala_precompile(LIBDINO_TEST_VALA_C
    SOURCES
        "tests/weak_map.vala"
        "tests/testcase.vala"
        "tests/common.vala"
    CUSTOM_VAPIS
        # Quoted for paths with spaces, matching the main vala_precompile call.
        "${CMAKE_BINARY_DIR}/exports/dino_internal.vapi"
        "${CMAKE_BINARY_DIR}/exports/xmpp-vala.vapi"
        "${CMAKE_BINARY_DIR}/exports/qlite.vapi"
    PACKAGES
        ${LIBDINO_PACKAGES}
    OPTIONS
        ${LIBDINO_EXTRA_OPTIONS}
    )
    add_definitions(${VALA_CFLAGS})
    add_executable(libdino-test ${LIBDINO_TEST_VALA_C})
    # PRIVATE: the test executable consumes libdino; nothing links against it.
    target_link_libraries(libdino-test PRIVATE libdino)
endif()

View File

@ -1,6 +0,0 @@
gdk-pixbuf-2.0
gee-0.8
glib-2.0
gmodule-2.0
qlite
xmpp-vala

View File

@ -1,92 +0,0 @@
# version_vala
# Generate version.vala at build time: version.py derives the version string
# from the top-level VERSION file, refined with git metadata when a git
# binary is available, and vcs_tag() substitutes it for %VERSION% in
# src/version.vala.in.
# NOTE(review): dot_git is not used within this span — presumably consumed
# elsewhere (or vestigial); confirm before removing.
dot_git = meson.current_source_dir() / '../.git'
version_file = meson.current_source_dir() / '../VERSION'
command = [prog_python, files('version.py'), version_file, '--git-repo', meson.current_source_dir()]
if prog_git.found()
# Only pass --git when a git executable was actually found at configure time.
command += ['--git', prog_git]
endif
version_vala = vcs_tag(command: command, input: 'src/version.vala.in', output: 'version.vala', replace_string: '%VERSION%')
# libdino
# Dependency objects (declared in the top-level meson.build) that the library
# links against and whose compile flags it inherits.
dependencies = [
dep_gdk_pixbuf,
dep_gee,
dep_gio,
dep_glib,
dep_gmodule,
dep_qlite,
dep_xmpp_vala
]
# Vala sources making up the libdino shared library.
sources = files(
'src/application.vala',
'src/dbus/login1.vala',
'src/dbus/notifications.vala',
'src/dbus/upower.vala',
'src/entity/account.vala',
'src/entity/call.vala',
'src/entity/conversation.vala',
'src/entity/encryption.vala',
'src/entity/file_transfer.vala',
'src/entity/message.vala',
'src/entity/settings.vala',
'src/plugin/interfaces.vala',
'src/plugin/loader.vala',
'src/plugin/registry.vala',
'src/service/avatar_manager.vala',
'src/service/blocking_manager.vala',
'src/service/call_store.vala',
'src/service/call_state.vala',
'src/service/call_peer_state.vala',
'src/service/calls.vala',
'src/service/chat_interaction.vala',
'src/service/connection_manager.vala',
'src/service/contact_model.vala',
'src/service/content_item_store.vala',
'src/service/conversation_manager.vala',
'src/service/counterpart_interaction_manager.vala',
'src/service/database.vala',
'src/service/entity_capabilities_storage.vala',
'src/service/entity_info.vala',
'src/service/fallback_body.vala',
'src/service/file_manager.vala',
'src/service/file_transfer_storage.vala',
'src/service/history_sync.vala',
'src/service/jingle_file_transfers.vala',
'src/service/message_correction.vala',
'src/service/message_processor.vala',
'src/service/message_storage.vala',
'src/service/module_manager.vala',
'src/service/muc_manager.vala',
'src/service/notification_events.vala',
'src/service/presence_manager.vala',
'src/service/replies.vala',
'src/service/reactions.vala',
'src/service/registration.vala',
'src/service/roster_manager.vala',
'src/service/search_processor.vala',
'src/service/sfs_metadata.vala',
'src/service/stateless_file_sharing.vala',
'src/service/stream_interactor.vala',
'src/service/util.vala',
'src/util/display_name.vala',
'src/util/limit_input_stream.vala',
'src/util/send_message.vala',
'src/util/util.vala',
'src/util/weak_map.vala',
'src/util/weak_timeout.vala',
)
# Append the build-time-generated version.vala custom target output.
sources += [version_vala]
# Install-path and log-domain defines baked into the generated C code.
c_args = [
'-DDINO_SYSTEM_LIBDIR_NAME="@0@"'.format(get_option('prefix') / get_option('libdir')),
'-DDINO_SYSTEM_PLUGIN_DIR="@0@"'.format(get_option('prefix') / get_option('libdir') / get_option('plugindir')),
'-DG_LOG_DOMAIN="libdino"',
]
vala_args = []
# Exact-match on valac 0.56.11 — presumably gating a workaround for a
# compiler regression specific to that release; confirm against the Vala
# sources that test the VALA_0_56_11 define.
if meson.get_compiler('vala').version().version_compare('=0.56.11')
vala_args += ['-D', 'VALA_0_56_11']
endif
# name_prefix '' because the target name already carries the "lib" prefix;
# the three-element install_dir installs library, header and vapi outputs.
lib_dino = library('libdino', sources, c_args: c_args, vala_args: vala_args, include_directories: include_directories('src'), dependencies: dependencies, name_prefix: '', version: '0.0', install: true, install_dir: [true, true, true], install_rpath: default_install_rpath)
# Dependency object consumed by the main binary and plugin subprojects.
dep_dino = declare_dependency(link_with: lib_dino, include_directories: include_directories('.', 'src'))
install_data('dino.deps', install_dir: get_option('datadir') / 'vala/vapi', install_tag: 'devel') # TODO: workaround for https://github.com/mesonbuild/meson/issues/9756
install_headers('src/dino_i18n.h')

View File

@ -1,12 +1,7 @@
using Dino.Entities;
namespace Dino {
public string get_version() { return VERSION; }
public string get_short_version() {
if (!VERSION.contains("~")) return VERSION;
return VERSION.split("~")[0] + "+";
}
extern const string VERSION;
public interface Application : GLib.Application {
@ -39,12 +34,12 @@ public interface Application : GLib.Application {
PresenceManager.start(stream_interactor);
CounterpartInteractionManager.start(stream_interactor);
BlockingManager.start(stream_interactor);
Calls.start(stream_interactor, db);
ConversationManager.start(stream_interactor, db);
MucManager.start(stream_interactor);
AvatarManager.start(stream_interactor, db);
RosterManager.start(stream_interactor, db);
FileManager.start(stream_interactor, db);
Calls.start(stream_interactor, db);
CallStore.start(stream_interactor, db);
ContentItemStore.start(stream_interactor, db);
ChatInteraction.start(stream_interactor);
@ -54,17 +49,15 @@ public interface Application : GLib.Application {
EntityInfo.start(stream_interactor, db);
MessageCorrection.start(stream_interactor, db);
FileTransferStorage.start(stream_interactor, db);
Reactions.start(stream_interactor, db);
Replies.start(stream_interactor, db);
FallbackBody.start(stream_interactor, db);
ContactModels.start(stream_interactor);
StatelessFileSharing.start(stream_interactor, db);
create_actions();
startup.connect(() => {
stream_interactor.connection_manager.log_options = print_xmpp;
restore();
Idle.add(() => {
restore();
return false;
});
});
shutdown.connect(() => {
stream_interactor.connection_manager.make_offline_all();

View File

@ -8,30 +8,38 @@ public class Account : Object {
public int id { get; set; }
public string localpart { get { return full_jid.localpart; } }
public string domainpart { get { return full_jid.domainpart; } }
public string resourcepart {
get { return full_jid.resourcepart; }
private set { full_jid.resourcepart = value; }
}
public string resourcepart { get { return full_jid.resourcepart;} }
public Jid bare_jid { owned get { return full_jid.bare_jid; } }
public Jid full_jid { get; private set; }
public string? password { get; set; }
public string display_name {
owned get { return (alias != null && alias.length > 0) ? alias.dup() : bare_jid.to_string(); }
owned get { return alias ?? bare_jid.to_string(); }
}
public string? alias { get; set; }
public bool enabled { get; set; default = false; }
public string? roster_version { get; set; }
public DateTime mam_earliest_synced { get; set; default=new DateTime.from_unix_utc(0); }
private Database? db;
public Account(Jid bare_jid, string password) {
public Account(Jid bare_jid, string? resourcepart, string? password, string? alias) {
this.id = -1;
try {
this.full_jid = bare_jid.with_resource(get_random_resource());
} catch (InvalidJidError e) {
error("Auto-generated resource was invalid (%s)", e.message);
if (resourcepart != null) {
try {
this.full_jid = bare_jid.with_resource(resourcepart);
} catch (InvalidJidError e) {
warning("Tried to create account with invalid resource (%s), defaulting to auto generated", e.message);
}
}
if (this.full_jid == null) {
try {
this.full_jid = bare_jid.with_resource("dino." + Random.next_int().to_string("%x"));
} catch (InvalidJidError e) {
error("Auto-generated resource was invalid (%s)", e.message);
}
}
this.password = password;
this.alias = alias;
}
public Account.from_row(Database db, Qlite.Row row) throws InvalidJidError {
@ -42,6 +50,7 @@ public class Account : Object {
alias = row[db.account.alias];
enabled = row[db.account.enabled];
roster_version = row[db.account.roster_version];
mam_earliest_synced = new DateTime.from_unix_utc(row[db.account.mam_earliest_synced]);
notify.connect(on_update);
}
@ -57,6 +66,7 @@ public class Account : Object {
.value(db.account.alias, alias)
.value(db.account.enabled, enabled)
.value(db.account.roster_version, roster_version)
.value(db.account.mam_earliest_synced, (long)mam_earliest_synced.to_unix())
.perform();
notify.connect(on_update);
@ -69,14 +79,6 @@ public class Account : Object {
db = null;
}
public void set_random_resource() {
this.resourcepart = get_random_resource();
}
private static string get_random_resource() {
return "dino." + Random.next_int().to_string("%x");
}
public bool equals(Account acc) {
return equals_func(this, acc);
}
@ -104,6 +106,8 @@ public class Account : Object {
update.set(db.account.enabled, enabled); break;
case "roster-version":
update.set(db.account.roster_version, roster_version); break;
case "mam-earliest-synced":
update.set(db.account.mam_earliest_synced, (long)mam_earliest_synced.to_unix()); break;
}
update.perform();
}

View File

@ -22,7 +22,6 @@ public class Conversation : Object {
public Jid counterpart { get; private set; }
public string? nickname { get; set; }
public bool active { get; set; default = false; }
public DateTime active_last_changed { get; private set; }
private DateTime? _last_active;
public DateTime? last_active {
get { return _last_active; }
@ -33,7 +32,7 @@ public class Conversation : Object {
}
}
}
public Encryption encryption { get; set; default = Encryption.UNKNOWN; }
public Encryption encryption { get; set; default = Encryption.NONE; }
public Message? read_up_to { get; set; }
public int read_up_to_item { get; set; default=-1; }
@ -42,9 +41,8 @@ public class Conversation : Object {
public enum Setting { DEFAULT, ON, OFF }
public Setting send_typing { get; set; default = Setting.DEFAULT; }
public Setting send_marker { get; set; default = Setting.DEFAULT; }
public int pinned { get; set; default = 0; }
public Setting send_marker { get; set; default = Setting.DEFAULT; }
private Database? db;
@ -65,7 +63,6 @@ public class Conversation : Object {
if (type_ == Conversation.Type.GROUPCHAT_PM) counterpart = counterpart.with_resource(resource);
nickname = type_ == Conversation.Type.GROUPCHAT ? resource : null;
active = row[db.conversation.active];
active_last_changed = new DateTime.from_unix_utc(row[db.conversation.active_last_changed]);
int64? last_active = row[db.conversation.last_active];
if (last_active != null) this.last_active = new DateTime.from_unix_utc(last_active);
encryption = (Encryption) row[db.conversation.encryption];
@ -75,26 +72,21 @@ public class Conversation : Object {
notify_setting = (NotifySetting) row[db.conversation.notification];
send_typing = (Setting) row[db.conversation.send_typing];
send_marker = (Setting) row[db.conversation.send_marker];
pinned = row[db.conversation.pinned];
notify.connect(on_update);
}
public void persist(Database db) {
this.db = db;
this.active_last_changed = new DateTime.now_utc();
var insert = db.conversation.insert()
.value(db.conversation.account_id, account.id)
.value(db.conversation.jid_id, db.get_jid_id(counterpart))
.value(db.conversation.type_, type_)
.value(db.conversation.encryption, encryption)
.value(db.conversation.active, active)
.value(db.conversation.active_last_changed, (long) active_last_changed.to_unix())
.value(db.conversation.notification, notify_setting)
.value(db.conversation.send_typing, send_typing)
.value(db.conversation.send_marker, send_marker)
.value(db.conversation.pinned, pinned);
.value(db.conversation.send_marker, send_marker);
if (read_up_to != null) {
insert.value(db.conversation.read_up_to, read_up_to.id);
}
@ -184,9 +176,7 @@ public class Conversation : Object {
case "nickname":
update.set(db.conversation.resource, nickname); break;
case "active":
update.set(db.conversation.active, active);
update.set(db.conversation.active_last_changed, (long) new DateTime.now_utc().to_unix());
break;
update.set(db.conversation.active, active); break;
case "last-active":
if (last_active != null) {
update.set(db.conversation.last_active, (long) last_active.to_unix());
@ -200,8 +190,6 @@ public class Conversation : Object {
update.set(db.conversation.send_typing, send_typing); break;
case "send-marker":
update.set(db.conversation.send_marker, send_marker); break;
case "pinned":
update.set(db.conversation.pinned, pinned); break;
}
update.perform();
}

View File

@ -1,16 +1,12 @@
namespace Dino.Entities {
public enum Encryption {
public enum Encryption {
NONE,
PGP,
OMEMO,
DTLS_SRTP,
SRTP,
UNKNOWN;
public bool is_some() {
return this != NONE;
}
}
UNKNOWN,
}
}

View File

@ -4,8 +4,6 @@ namespace Dino.Entities {
public class FileTransfer : Object {
public signal void sources_changed();
public const bool DIRECTION_SENT = true;
public const bool DIRECTION_RECEIVED = false;
@ -17,7 +15,6 @@ public class FileTransfer : Object {
}
public int id { get; set; default=-1; }
public string? file_sharing_id { get; set; }
public Account account { get; set; }
public Jid counterpart { get; set; }
public Jid ourpart { get; set; }
@ -67,51 +64,12 @@ public class FileTransfer : Object {
}
public string path { get; set; }
public string? mime_type { get; set; }
public int64 size { get; set; }
// TODO(hrxi): expand to 64 bit
public int size { get; set; default=-1; }
public State state { get; set; default=State.NOT_STARTED; }
public int provider { get; set; }
public string info { get; set; }
public Cancellable cancellable { get; default=new Cancellable(); }
// This value is not persisted
public int64 transferred_bytes { get; set; }
public Xep.FileMetadataElement.FileMetadata file_metadata {
owned get {
return new Xep.FileMetadataElement.FileMetadata() {
name = this.file_name,
mime_type = this.mime_type,
size = this.size,
desc = this.desc,
date = this.modification_date,
width = this.width,
height = this.height,
length = this.length,
hashes = this.hashes,
thumbnails = this.thumbnails
};
}
set {
this.file_name = value.name;
this.mime_type = value.mime_type;
this.size = value.size;
this.desc = value.desc;
this.modification_date = value.date;
this.width = value.width;
this.height = value.height;
this.length = value.length;
this.hashes = value.hashes;
this.thumbnails = value.thumbnails;
}
}
public string? desc { get; set; }
public DateTime? modification_date { get; set; }
public int width { get; set; default=-1; }
public int height { get; set; default=-1; }
public int64 length { get; set; default=-1; }
public Gee.List<Xep.CryptographicHashes.Hash> hashes = new Gee.ArrayList<Xep.CryptographicHashes.Hash>();
public Gee.List<Xep.StatelessFileSharing.Source> sfs_sources = new Gee.ArrayList<Xep.StatelessFileSharing.Source>(Xep.StatelessFileSharing.Source.equals_func);
public Gee.List<Xep.JingleContentThumbnails.Thumbnail> thumbnails = new Gee.ArrayList<Xep.JingleContentThumbnails.Thumbnail>();
private Database? db;
private string storage_dir;
@ -121,7 +79,6 @@ public class FileTransfer : Object {
this.storage_dir = storage_dir;
id = row[db.file_transfer.id];
file_sharing_id = row[db.file_transfer.file_sharing_id];
account = db.get_account_by_id(row[db.file_transfer.account_id]); // TODO dont have to generate acc new
counterpart = db.get_jid_by_id(row[db.file_transfer.counterpart_id]);
@ -141,37 +98,10 @@ public class FileTransfer : Object {
file_name = row[db.file_transfer.file_name];
path = row[db.file_transfer.path];
mime_type = row[db.file_transfer.mime_type];
size = (int64) row[db.file_transfer.size];
size = row[db.file_transfer.size];
state = (State) row[db.file_transfer.state];
provider = row[db.file_transfer.provider];
info = row[db.file_transfer.info];
modification_date = new DateTime.from_unix_utc(row[db.file_transfer.modification_date]);
width = row[db.file_transfer.width];
height = row[db.file_transfer.height];
length = (int64) row[db.file_transfer.length];
// TODO put those into the initial query
foreach(var hash_row in db.file_hashes.select().with(db.file_hashes.id, "=", id)) {
Xep.CryptographicHashes.Hash hash = new Xep.CryptographicHashes.Hash();
hash.algo = hash_row[db.file_hashes.algo];
hash.val = hash_row[db.file_hashes.value];
hashes.add(hash);
}
foreach(var thumbnail_row in db.file_thumbnails.select().with(db.file_thumbnails.id, "=", id)) {
Xep.JingleContentThumbnails.Thumbnail thumbnail = new Xep.JingleContentThumbnails.Thumbnail();
thumbnail.data = Xmpp.get_data_for_uri(thumbnail_row[db.file_thumbnails.uri]);
thumbnail.media_type = thumbnail_row[db.file_thumbnails.mime_type];
thumbnail.width = thumbnail_row[db.file_thumbnails.width];
thumbnail.height = thumbnail_row[db.file_thumbnails.height];
thumbnails.add(thumbnail);
}
foreach(Qlite.Row source_row in db.sfs_sources.select().with(db.sfs_sources.file_transfer_id, "=", id)) {
if (source_row[db.sfs_sources.type] == "http") {
sfs_sources.add(new Xep.StatelessFileSharing.HttpSource() { url=source_row[db.sfs_sources.data] });
}
}
notify.connect(on_update);
}
@ -190,79 +120,26 @@ public class FileTransfer : Object {
.value(db.file_transfer.local_time, (long) local_time.to_unix())
.value(db.file_transfer.encryption, encryption)
.value(db.file_transfer.file_name, file_name)
.value(db.file_transfer.size, (long) size)
.value(db.file_transfer.size, size)
.value(db.file_transfer.state, state)
.value(db.file_transfer.provider, provider)
.value(db.file_transfer.info, info);
if (file_sharing_id != null) builder.value(db.file_transfer.file_sharing_id, file_sharing_id);
if (file_name != null) builder.value(db.file_transfer.file_name, file_name);
if (path != null) builder.value(db.file_transfer.path, path);
if (mime_type != null) builder.value(db.file_transfer.mime_type, mime_type);
if (path != null) builder.value(db.file_transfer.path, path);
if (modification_date != null) builder.value(db.file_transfer.modification_date, (long) modification_date.to_unix());
if (width != -1) builder.value(db.file_transfer.width, width);
if (height != -1) builder.value(db.file_transfer.height, height);
if (length != -1) builder.value(db.file_transfer.length, (long) length);
id = (int) builder.perform();
foreach (Xep.CryptographicHashes.Hash hash in hashes) {
db.file_hashes.insert()
.value(db.file_hashes.id, id)
.value(db.file_hashes.algo, hash.algo)
.value(db.file_hashes.value, hash.val)
.perform();
}
foreach (Xep.JingleContentThumbnails.Thumbnail thumbnail in thumbnails) {
string data_uri = "data:image/png;base64," + Base64.encode(thumbnail.data.get_data());
db.file_thumbnails.insert()
.value(db.file_thumbnails.id, id)
.value(db.file_thumbnails.uri, data_uri)
.value(db.file_thumbnails.mime_type, thumbnail.media_type)
.value(db.file_thumbnails.width, thumbnail.width)
.value(db.file_thumbnails.height, thumbnail.height)
.perform();
}
foreach (Xep.StatelessFileSharing.Source source in sfs_sources) {
persist_source(source);
}
notify.connect(on_update);
}
public void add_sfs_source(Xep.StatelessFileSharing.Source source) {
if (sfs_sources.contains(source)) return; // Don't add the same source twice. Might happen due to MAM and lacking deduplication.
sfs_sources.add(source);
if (id != -1) {
persist_source(source);
}
sources_changed();
}
private void persist_source(Xep.StatelessFileSharing.Source source) {
Xep.StatelessFileSharing.HttpSource? http_source = source as Xep.StatelessFileSharing.HttpSource;
if (http_source != null) {
db.sfs_sources.insert()
.value(db.sfs_sources.file_transfer_id, id)
.value(db.sfs_sources.type, "http")
.value(db.sfs_sources.data, http_source.url)
.perform();
}
}
public File? get_file() {
if (path == null) return null;
public File get_file() {
return File.new_for_path(Path.build_filename(Dino.get_storage_dir(), "files", path));
}
private void on_update(Object o, ParamSpec sp) {
Qlite.UpdateBuilder update_builder = db.file_transfer.update().with(db.file_transfer.id, "=", id);
switch (sp.name) {
case "file-sharing-id":
update_builder.set(db.file_transfer.file_sharing_id, file_sharing_id); break;
case "counterpart":
update_builder.set(db.file_transfer.counterpart_id, db.get_jid_id(counterpart));
update_builder.set(db.file_transfer.counterpart_resource, counterpart.resourcepart); break;
@ -283,7 +160,7 @@ public class FileTransfer : Object {
case "mime-type":
update_builder.set(db.file_transfer.mime_type, mime_type); break;
case "size":
update_builder.set(db.file_transfer.size, (long) size); break;
update_builder.set(db.file_transfer.size, size); break;
case "state":
if (state == State.IN_PROGRESS) return;
update_builder.set(db.file_transfer.state, state); break;
@ -291,14 +168,6 @@ public class FileTransfer : Object {
update_builder.set(db.file_transfer.provider, provider); break;
case "info":
update_builder.set(db.file_transfer.info, info); break;
case "modification-date":
update_builder.set(db.file_transfer.modification_date, (long) modification_date.to_unix()); break;
case "width":
update_builder.set(db.file_transfer.width, width); break;
case "height":
update_builder.set(db.file_transfer.height, height); break;
case "length":
update_builder.set(db.file_transfer.length, (long) length); break;
}
update_builder.perform();
}

View File

@ -67,10 +67,6 @@ public class Message : Object {
}
}
public string? edit_to = null;
public int quoted_item_id { get; private set; default=0; }
private Gee.List<Xep.FallbackIndication.Fallback> fallbacks = null;
private Gee.List<Xep.MessageMarkup.Span> markups = null;
private Database? db;
@ -109,7 +105,6 @@ public class Message : Object {
if (real_jid_str != null) real_jid = new Jid(real_jid_str);
edit_to = row[db.message_correction.to_stanza_id];
quoted_item_id = row[db.reply.quoted_content_item_id];
notify.connect(on_update);
}
@ -143,103 +138,6 @@ public class Message : Object {
notify.connect(on_update);
}
public void set_quoted_item(int quoted_content_item_id) {
if (id == -1) {
warning("Message needs to be persisted before setting quoted item");
return;
}
this.quoted_item_id = quoted_content_item_id;
db.reply.upsert()
.value(db.reply.message_id, id, true)
.value(db.reply.quoted_content_item_id, quoted_content_item_id)
.value_null(db.reply.quoted_message_stanza_id)
.value_null(db.reply.quoted_message_from)
.perform();
}
public Gee.List<Xep.FallbackIndication.Fallback> get_fallbacks() {
if (fallbacks != null) return fallbacks;
fetch_body_meta();
return fallbacks;
}
public Gee.List<Xep.MessageMarkup.Span> get_markups() {
if (markups != null) return markups;
fetch_body_meta();
return markups;
}
public void persist_markups(Gee.List<Xep.MessageMarkup.Span> markups, int message_id) {
this.markups = markups;
foreach (var span in markups) {
foreach (var ty in span.types) {
db.body_meta.insert()
.value(db.body_meta.info_type, Xep.MessageMarkup.NS_URI)
.value(db.body_meta.message_id, message_id)
.value(db.body_meta.info, Xep.MessageMarkup.span_type_to_str(ty))
.value(db.body_meta.from_char, span.start_char)
.value(db.body_meta.to_char, span.end_char)
.perform();
}
}
}
private void fetch_body_meta() {
var fallbacks_by_ns = new HashMap<string, ArrayList<Xep.FallbackIndication.FallbackLocation>>();
var markups = new ArrayList<Xep.MessageMarkup.Span>();
foreach (Qlite.Row row in db.body_meta.select().with(db.body_meta.message_id, "=", id)) {
switch (row[db.body_meta.info_type]) {
case Xep.FallbackIndication.NS_URI:
string ns_uri = row[db.body_meta.info];
if (!fallbacks_by_ns.has_key(ns_uri)) {
fallbacks_by_ns[ns_uri] = new ArrayList<Xep.FallbackIndication.FallbackLocation>();
}
fallbacks_by_ns[ns_uri].add(new Xep.FallbackIndication.FallbackLocation.partial_body(row[db.body_meta.from_char], row[db.body_meta.to_char]));
break;
case Xep.MessageMarkup.NS_URI:
var types = new ArrayList<Xep.MessageMarkup.SpanType>();
types.add(Xep.MessageMarkup.str_to_span_type(row[db.body_meta.info]));
markups.add(new Xep.MessageMarkup.Span() { types=types, start_char=row[db.body_meta.from_char], end_char=row[db.body_meta.to_char] });
break;
}
}
var fallbacks = new ArrayList<Xep.FallbackIndication.Fallback>();
foreach (string ns_uri in fallbacks_by_ns.keys) {
fallbacks.add(new Xep.FallbackIndication.Fallback(ns_uri, fallbacks_by_ns[ns_uri]));
}
this.fallbacks = fallbacks;
this.markups = markups;
}
public void set_fallbacks(Gee.List<Xep.FallbackIndication.Fallback> fallbacks) {
if (id == -1) {
warning("Message needs to be persisted before setting fallbacks");
return;
}
this.fallbacks = fallbacks;
foreach (var fallback in fallbacks) {
foreach (var location in fallback.locations) {
db.body_meta.insert()
.value(db.body_meta.message_id, id)
.value(db.body_meta.info_type, Xep.FallbackIndication.NS_URI)
.value(db.body_meta.info, fallback.ns_uri)
.value(db.body_meta.from_char, location.from_char)
.value(db.body_meta.to_char, location.to_char)
.perform();
}
}
}
public void set_type_string(string type) {
switch (type) {
case Xmpp.MessageStanza.TYPE_CHAT:
@ -274,7 +172,6 @@ public class Message : Object {
}
public static uint hash_func(Message message) {
if (message.body == null) return 0;
return message.body.hash();
}
@ -313,13 +210,6 @@ public class Message : Object {
.value(db.real_jid.real_jid, real_jid.to_string())
.perform();
}
if (sp.get_name() == "quoted-item-id") {
db.reply.upsert()
.value(db.reply.message_id, id, true)
.value(db.reply.quoted_content_item_id, quoted_item_id)
.perform();
}
}
}

View File

@ -67,7 +67,6 @@ public class Settings : Object {
}
}
// There is currently no spell checking for GTK4, thus there is currently no UI for this setting.
private bool check_spelling_;
public bool check_spelling {
get { return check_spelling_; }
@ -79,24 +78,6 @@ public class Settings : Object {
check_spelling_ = value;
}
}
public Encryption get_default_encryption(Account account) {
string? setting = db.account_settings.get_value(account.id, "default-encryption");
if (setting != null) {
return (Encryption) int.parse(setting);
}
return Encryption.OMEMO;
}
public void set_default_encryption(Account account, Encryption encryption) {
db.account_settings.upsert()
.value(db.account_settings.key, "default-encryption", true)
.value(db.account_settings.account_id, account.id, true)
.value(db.account_settings.value, ((int)encryption).to_string())
.perform();
}
}
}

View File

@ -12,8 +12,7 @@ public enum Priority {
}
public enum WidgetType {
GTK3,
GTK4
GTK
}
public interface RootInterface : Object {
@ -28,8 +27,6 @@ public interface EncryptionListEntry : Object {
public abstract void encryption_activated(Entities.Conversation conversation, Plugins.SetInputFieldStatus callback);
public abstract Object? get_encryption_icon(Entities.Conversation conversation, ContentItem content_item);
public abstract string? get_encryption_icon_name(Entities.Conversation conversation, ContentItem content_item);
}
public interface CallEncryptionEntry : Object {
@ -48,26 +45,21 @@ public abstract class AccountSettingsEntry : Object {
public abstract string name { get; }
public virtual int16 label_top_padding { get { return -1; } }
public abstract signal void activated();
public abstract void deactivate();
public abstract void set_account(Account account);
public abstract Object? get_widget(WidgetType type);
public abstract AccountSettingsWidget? get_widget(WidgetType type);
}
public abstract class EncryptionPreferencesEntry : Object {
public abstract string id { get; }
public virtual Priority priority { get { return Priority.DEFAULT; } }
public interface AccountSettingsWidget : Object {
public abstract void set_account(Account account);
public abstract Object? get_widget(Account account, WidgetType type);
public abstract signal void activated();
public abstract void deactivate();
}
public interface ContactDetailsProvider : Object {
public abstract string id { get; }
public abstract string tab { get; }
public abstract void populate(Conversation conversation, ContactDetails contact_details, WidgetType type);
public abstract Object? get_widget(Conversation conversation);
}
public class ContactDetails : Object {
@ -84,8 +76,10 @@ public interface TextCommand : Object {
public interface ConversationTitlebarEntry : Object {
public abstract string id { get; }
public abstract double order { get; }
public abstract Object? get_widget(WidgetType type);
public abstract ConversationTitlebarWidget? get_widget(WidgetType type);
}
public interface ConversationTitlebarWidget : Object {
public abstract void set_conversation(Conversation conversation);
public abstract void unset_conversation();
}
@ -102,7 +96,7 @@ public abstract interface ConversationAdditionPopulator : ConversationItemPopula
public abstract interface VideoCallPlugin : Object {
public abstract bool supported();
public abstract bool supports(string? media);
// Video widget
public abstract VideoCallWidget? create_widget(WidgetType type);
@ -152,22 +146,14 @@ public abstract class MetaConversationItem : Object {
public bool requires_header { get; set; default=false; }
public bool in_edit_mode { get; set; default=false; }
public abstract Object? get_widget(ConversationItemWidgetInterface outer, WidgetType type);
public abstract Object? get_widget(WidgetType type);
public abstract Gee.List<MessageAction>? get_item_actions(WidgetType type);
}
public interface ConversationItemWidgetInterface: Object {
public abstract void set_widget(Object object, WidgetType type, int priority);
}
public delegate void MessageActionEvoked(Variant? variant);
public delegate void MessageActionEvoked(Object button, Plugins.MetaConversationItem evoked_on, Object widget);
public class MessageAction : Object {
public string name;
public bool sensitive = true;
public string icon_name;
public string? tooltip;
public Object? popover;
public MessageActionEvoked? callback;
public MessageActionEvoked callback;
}
public abstract class MetaConversationNotification : Object {

View File

@ -3,24 +3,25 @@ using Gee;
namespace Dino.Plugins {
public class Registry {
public HashMap<Entities.Encryption, EncryptionListEntry> encryption_list_entries = new HashMap<Entities.Encryption, EncryptionListEntry>();
public HashMap<string, CallEncryptionEntry> call_encryption_entries = new HashMap<string, CallEncryptionEntry>();
public ArrayList<AccountSettingsEntry> account_settings_entries = new ArrayList<AccountSettingsEntry>();
public ArrayList<EncryptionPreferencesEntry> encryption_preferences_entries = new ArrayList<EncryptionPreferencesEntry>();
public ArrayList<ContactDetailsProvider> contact_details_entries = new ArrayList<ContactDetailsProvider>();
public Map<string, TextCommand> text_commands = new HashMap<string, TextCommand>();
public Gee.List<ConversationAdditionPopulator> conversation_addition_populators = new ArrayList<ConversationAdditionPopulator>();
public Gee.List<NotificationPopulator> notification_populators = new ArrayList<NotificationPopulator>();
public Gee.Collection<ConversationTitlebarEntry> conversation_titlebar_entries = new Gee.TreeSet<ConversationTitlebarEntry>((a, b) => {
internal ArrayList<EncryptionListEntry> encryption_list_entries = new ArrayList<EncryptionListEntry>();
internal HashMap<string, CallEncryptionEntry> call_encryption_entries = new HashMap<string, CallEncryptionEntry>();
internal ArrayList<AccountSettingsEntry> account_settings_entries = new ArrayList<AccountSettingsEntry>();
internal ArrayList<ContactDetailsProvider> contact_details_entries = new ArrayList<ContactDetailsProvider>();
internal Map<string, TextCommand> text_commands = new HashMap<string, TextCommand>();
internal Gee.List<ConversationAdditionPopulator> conversation_addition_populators = new ArrayList<ConversationAdditionPopulator>();
internal Gee.List<NotificationPopulator> notification_populators = new ArrayList<NotificationPopulator>();
internal Gee.Collection<ConversationTitlebarEntry> conversation_titlebar_entries = new Gee.TreeSet<ConversationTitlebarEntry>((a, b) => {
return (int)(a.order - b.order);
});
public VideoCallPlugin? video_call_plugin;
public bool register_encryption_list_entry(EncryptionListEntry entry) {
lock(encryption_list_entries) {
if (encryption_list_entries.has_key(entry.encryption)) return false;
encryption_list_entries[entry.encryption] = entry;
foreach(var e in encryption_list_entries) {
if (e.encryption == entry.encryption) return false;
}
encryption_list_entries.add(entry);
encryption_list_entries.sort((a,b) => b.name.collate(a.name));
return true;
}
}
@ -44,18 +45,6 @@ public class Registry {
}
}
// Registers an encryption preferences entry unless one with the same id is
// already present. Returns true when the entry was actually added.
public bool register_encryption_preferences_entry(EncryptionPreferencesEntry entry) {
    lock(encryption_preferences_entries) {
        foreach (var existing in encryption_preferences_entries) {
            if (existing.id == entry.id) return false;
        }
        encryption_preferences_entries.add(entry);
        // TODO: Order by priority
        // encryption_preferences_entries.sort((a,b) => b.name.collate(a.name));
        return true;
    }
}
public bool register_contact_details_entry(ContactDetailsProvider entry) {
lock(contact_details_entries) {
foreach(ContactDetailsProvider e in contact_details_entries) {

View File

@ -12,7 +12,6 @@ public class AvatarManager : StreamInteractionModule, Object {
public string id { get { return IDENTITY.id; } }
public signal void received_avatar(Jid jid, Account account);
public signal void fetched_avatar(Jid jid, Account account);
private enum Source {
USER_AVATARS,
@ -24,7 +23,8 @@ public class AvatarManager : StreamInteractionModule, Object {
private string folder = null;
private HashMap<Jid, string> user_avatars = new HashMap<Jid, string>(Jid.hash_func, Jid.equals_func);
private HashMap<Jid, string> vcard_avatars = new HashMap<Jid, string>(Jid.hash_func, Jid.equals_func);
private HashSet<string> pending_fetch = new HashSet<string>();
private HashMap<string, Pixbuf> cached_pixbuf = new HashMap<string, Pixbuf>();
private HashMap<string, Gee.List<SourceFuncWrapper>> pending_pixbuf = new HashMap<string, Gee.List<SourceFuncWrapper>>();
private const int MAX_PIXEL = 192;
public static void start(StreamInteractor stream_interactor, Database db) {
@ -35,39 +35,8 @@ public class AvatarManager : StreamInteractionModule, Object {
private AvatarManager(StreamInteractor stream_interactor, Database db) {
this.stream_interactor = stream_interactor;
this.db = db;
File old_avatars = File.new_build_filename(Dino.get_storage_dir(), "avatars");
File new_avatars = File.new_build_filename(Dino.get_cache_dir(), "avatars");
this.folder = new_avatars.get_path();
// Move old avatar location to new one
if (old_avatars.query_exists()) {
if (!new_avatars.query_exists()) {
// Move old avatars folder (~/.local/share/dino) to new location (~/.cache/dino)
try {
new_avatars.get_parent().make_directory_with_parents();
} catch (Error e) { }
try {
old_avatars.move(new_avatars, FileCopyFlags.NONE);
debug("Avatars directory %s moved to %s", old_avatars.get_path(), new_avatars.get_path());
} catch (Error e) { }
} else {
// If both old and new folders exist, remove the old one
try {
FileEnumerator enumerator = old_avatars.enumerate_children("standard::*", FileQueryInfoFlags.NOFOLLOW_SYMLINKS);
FileInfo info = null;
while ((info = enumerator.next_file()) != null) {
FileUtils.remove(old_avatars.get_path() + "/" + info.get_name());
}
DirUtils.remove(old_avatars.get_path());
} catch (Error e) { }
}
}
// Create avatar folder
try {
new_avatars.make_directory_with_parents();
} catch (Error e) { }
this.folder = Path.build_filename(Dino.get_storage_dir(), "avatars");
DirUtils.create_with_parents(this.folder, 0700);
stream_interactor.account_added.connect(on_account_added);
stream_interactor.module_manager.initialize_account_modules.connect((_, modules) => {
@ -76,18 +45,6 @@ public class AvatarManager : StreamInteractionModule, Object {
});
}
// Returns the on-disk avatar file for this JID, or null when no avatar hash
// is known. When a hash is known but the file is missing locally, a background
// fetch is started and null is returned for now.
public File? get_avatar_file(Account account, Jid jid_) {
    string? hash = get_avatar_hash(account, jid_);
    if (hash == null) return null;
    File avatar_file = File.new_for_path(Path.build_filename(folder, hash));
    if (avatar_file.query_exists()) return avatar_file;
    fetch_and_store_for_jid.begin(account, jid_);
    return null;
}
private string? get_avatar_hash(Account account, Jid jid_) {
Jid jid = jid_;
if (!stream_interactor.get_module(MucManager.IDENTITY).is_groupchat_occupant(jid_, account)) {
@ -102,10 +59,79 @@ public class AvatarManager : StreamInteractionModule, Object {
}
}
// True when an avatar hash is known for this JID and the corresponding
// pixbuf is already held in the in-memory cache.
public bool has_avatar_cached(Account account, Jid jid) {
    string? known_hash = get_avatar_hash(account, jid);
    if (known_hash == null) return false;
    return cached_pixbuf.has_key(known_hash);
}
// True when any avatar hash (user avatar or vCard source) is known for this JID,
// regardless of whether the image itself has been fetched yet.
public bool has_avatar(Account account, Jid jid) {
    return get_avatar_hash(account, jid) != null;
}
// Returns the in-memory pixbuf for this JID's current avatar hash, or null
// when no hash is known or the image has not been loaded into the cache yet.
public Pixbuf? get_cached_avatar(Account account, Jid jid_) {
    string? hash = get_avatar_hash(account, jid_);
    if (hash != null && cached_pixbuf.has_key(hash)) {
        return cached_pixbuf[hash];
    }
    return null;
}
// Resolves the avatar pixbuf for a JID, loading it from disk or fetching it
// over XMPP as needed. Concurrent calls for the same hash are coalesced: only
// the first caller does the work, later callers park on pending_pixbuf and are
// woken when the image is available. Returns null when no hash is known or the
// stream is not usable.
public async Pixbuf? get_avatar(Account account, Jid jid_) {
    Jid jid = jid_;
    // Non-occupant JIDs are normalized to the bare JID; MUC occupants keep
    // their full JID so each occupant can have a distinct avatar.
    if (!stream_interactor.get_module(MucManager.IDENTITY).is_groupchat_occupant(jid_, account)) {
        jid = jid_.bare_jid;
    }
    // source: 1 = XEP-0084 User Avatars, 2 = vCard-based avatar.
    int source = -1;
    string? hash = null;
    if (user_avatars.has_key(jid)) {
        hash = user_avatars[jid];
        source = 1;
    } else if (vcard_avatars.has_key(jid)) {
        hash = vcard_avatars[jid];
        source = 2;
    }
    if (hash == null) return null;
    if (cached_pixbuf.has_key(hash)) {
        return cached_pixbuf[hash];
    }
    XmppStream? stream = stream_interactor.get_stream(account);
    if (stream == null || !stream.negotiation_complete) return null;
    if (pending_pixbuf.has_key(hash)) {
        // Another call is already resolving this hash: queue our continuation
        // and suspend until that call invokes it, then read the cached result.
        pending_pixbuf[hash].add(new SourceFuncWrapper(get_avatar.callback));
        yield;
        return cached_pixbuf[hash];
    }
    pending_pixbuf[hash] = new ArrayList<SourceFuncWrapper>();
    // First try the local on-disk cache before going to the network.
    Pixbuf? image = yield get_image(hash);
    if (image != null) {
        cached_pixbuf[hash] = image;
    } else {
        Bytes? bytes = null;
        if (source == 1) {
            bytes = yield Xmpp.Xep.UserAvatars.fetch_image(stream, jid, hash);
        } else if (source == 2) {
            bytes = yield Xmpp.Xep.VCard.fetch_image(stream, jid, hash);
            // A failed vCard fetch for a bare JID invalidates the stored hash.
            if (bytes == null && jid.is_bare()) {
                db.avatar.delete().with(db.avatar.jid_id, "=", db.get_jid_id(jid)).perform();
            }
        }
        if (bytes != null) {
            store_image(hash, bytes);
            image = yield get_image(hash);
        }
        // NOTE(review): image may still be null here; a null is cached, which
        // presumably prevents repeated refetch attempts — confirm intended.
        cached_pixbuf[hash] = image;
    }
    // Wake every caller that queued on this hash while we were working.
    foreach (SourceFuncWrapper sfw in pending_pixbuf[hash]) {
        sfw.sfun();
    }
    return image;
}
public void publish(Account account, string file) {
try {
Pixbuf pixbuf = new Pixbuf.from_file(file);
@ -127,32 +153,30 @@ public class AvatarManager : StreamInteractionModule, Object {
}
}
// Publishes an avatar removal for the account, if it is currently connected.
public void unset_avatar(Account account) {
    XmppStream? stream = stream_interactor.get_stream(account);
    if (stream != null) {
        Xmpp.Xep.UserAvatars.unset_avatar(stream);
    }
}
private void on_account_added(Account account) {
stream_interactor.module_manager.get_module(account, Xep.UserAvatars.Module.IDENTITY).received_avatar_hash.connect((stream, jid, id) =>
on_user_avatar_received(account, jid, id)
on_user_avatar_received.begin(account, jid, id)
);
stream_interactor.module_manager.get_module(account, Xep.UserAvatars.Module.IDENTITY).avatar_removed.connect((stream, jid) => {
on_user_avatar_removed(account, jid);
});
stream_interactor.module_manager.get_module(account, Xep.VCard.Module.IDENTITY).received_avatar_hash.connect((stream, jid, id) =>
on_vcard_avatar_received(account, jid, id)
on_vcard_avatar_received.begin(account, jid, id)
);
foreach (var entry in get_avatar_hashes(account, Source.USER_AVATARS).entries) {
on_user_avatar_received(account, entry.key, entry.value);
user_avatars[entry.key] = entry.value;
}
foreach (var entry in get_avatar_hashes(account, Source.VCARD).entries) {
on_vcard_avatar_received(account, entry.key, entry.value);
// FIXME: remove. temporary to remove falsely saved avatars.
if (stream_interactor.get_module(MucManager.IDENTITY).is_groupchat(entry.key, account)) {
db.avatar.delete().with(db.avatar.jid_id, "=", db.get_jid_id(entry.key)).perform();
continue;
}
vcard_avatars[entry.key] = entry.value;
}
}
private void on_user_avatar_received(Account account, Jid jid_, string id) {
private async void on_user_avatar_received(Account account, Jid jid_, string id) {
Jid jid = jid_.bare_jid;
if (!user_avatars.has_key(jid) || user_avatars[jid] != id) {
@ -162,14 +186,7 @@ public class AvatarManager : StreamInteractionModule, Object {
received_avatar(jid, account);
}
// Handles a XEP-0084 avatar-removed notification: drops the in-memory hash,
// removes the persisted hash, then notifies listeners via received_avatar.
private void on_user_avatar_removed(Account account, Jid jid_) {
    Jid jid = jid_.bare_jid;
    user_avatars.unset(jid);
    remove_avatar_hash(account, jid, Source.USER_AVATARS);
    // Emitted so UI can refresh; the signal does not imply a new avatar exists.
    received_avatar(jid, account);
}
private void on_vcard_avatar_received(Account account, Jid jid_, string id) {
private async void on_vcard_avatar_received(Account account, Jid jid_, string id) {
bool is_gc = stream_interactor.get_module(MucManager.IDENTITY).might_be_groupchat(jid_.bare_jid, account);
Jid jid = is_gc ? jid_ : jid_.bare_jid;
@ -191,14 +208,6 @@ public class AvatarManager : StreamInteractionModule, Object {
.perform();
}
// Deletes the stored avatar hash for (jid, account, source type) from the
// avatar table. `type` matches the Source enum values used elsewhere.
public void remove_avatar_hash(Account account, Jid jid, int type) {
    db.avatar.delete()
        .with(db.avatar.jid_id, "=", db.get_jid_id(jid))
        .with(db.avatar.account_id, "=", account.id)
        .with(db.avatar.type_, "=", type)
        .perform();
}
public HashMap<Jid, string> get_avatar_hashes(Account account, int type) {
HashMap<Jid, string> ret = new HashMap<Jid, string>(Jid.hash_func, Jid.equals_func);
foreach (Row row in db.avatar.select({db.avatar.jid_id, db.avatar.hash})
@ -209,53 +218,12 @@ public class AvatarManager : StreamInteractionModule, Object {
return ret;
}
// Looks up the known avatar hash for a JID (preferring XEP-0084 user avatars
// over vCard avatars) and downloads/stores the image if a usable stream exists.
// Returns false when no hash is known or the stream is unavailable.
public async bool fetch_and_store_for_jid(Account account, Jid jid) {
    // source: 1 = XEP-0084 User Avatars, 2 = vCard — mirrors fetch_and_store.
    int source = -1;
    string? hash = null;
    if (user_avatars.has_key(jid)) {
        hash = user_avatars[jid];
        source = 1;
    } else if (vcard_avatars.has_key(jid)) {
        hash = vcard_avatars[jid];
        source = 2;
    } else {
        return false;
    }
    XmppStream? stream = stream_interactor.get_stream(account);
    if (stream == null || !stream.negotiation_complete) return false;
    return yield fetch_and_store(stream, account, jid, source, hash);
}
// Downloads the avatar image identified by `hash` from the given source and
// writes it to disk. Uses pending_fetch to deduplicate concurrent downloads of
// the same hash. Returns true when bytes were received and stored.
private async bool fetch_and_store(XmppStream stream, Account account, Jid jid, int source, string? hash) {
    // Bail out if there is nothing to fetch or this hash is already in flight.
    if (hash == null || pending_fetch.contains(hash)) return false;
    pending_fetch.add(hash);
    Bytes? bytes = null;
    if (source == 1) {
        bytes = yield Xmpp.Xep.UserAvatars.fetch_image(stream, jid, hash);
    } else if (source == 2) {
        bytes = yield Xmpp.Xep.VCard.fetch_image(stream, jid, hash);
        // A failed vCard fetch for a bare JID invalidates the persisted hash.
        if (bytes == null && jid.is_bare()) {
            db.avatar.delete().with(db.avatar.jid_id, "=", db.get_jid_id(jid)).perform();
        }
    }
    if (bytes != null) {
        yield store_image(hash, bytes);
        // Signal consumers only after the image is actually on disk.
        fetched_avatar(jid, account);
    }
    pending_fetch.remove(hash);
    return bytes != null;
}
private async void store_image(string id, Bytes data) {
public void store_image(string id, Bytes data) {
File file = File.new_for_path(Path.build_filename(folder, id));
try {
if (file.query_exists()) file.delete(); //TODO y?
DataOutputStream fos = new DataOutputStream(file.create(FileCreateFlags.REPLACE_DESTINATION));
yield fos.write_bytes_async(data);
fos.write_bytes_async.begin(data);
} catch (Error e) {
// Ignore: we failed in storing, so we refuse to display later...
}
@ -265,6 +233,29 @@ public class AvatarManager : StreamInteractionModule, Object {
File file = File.new_for_path(Path.build_filename(folder, id));
return file.query_exists();
}
// Loads the avatar image with the given content hash from the on-disk cache.
// The file content is re-hashed (SHA-1) against its name; a mismatching file
// is deleted as corrupt. Returns null on any error or missing file.
public async Pixbuf? get_image(string id) {
    try {
        File file = File.new_for_path(Path.build_filename(folder, id));
        FileInputStream stream = yield file.read_async(Priority.LOW);
        uint8 fbuf[1024];
        size_t size;
        Checksum checksum = new Checksum (ChecksumType.SHA1);
        while ((size = yield stream.read_async(fbuf, Priority.LOW)) > 0) {
            checksum.update(fbuf, size);
        }
        if (checksum.get_string() != id) {
            // File content does not match its hash-derived name: drop it.
            // NOTE(review): decoding still proceeds below even after removal —
            // presumably the open stream keeps the data readable; confirm intended.
            FileUtils.remove(file.get_path());
        }
        // Rewind: the checksum pass consumed the stream; decode from the start.
        stream.seek(0, SeekType.SET);
        return yield new Pixbuf.from_stream_async(stream, null);
    } catch (Error e) {
        return null;
    }
}
}
}

View File

@ -9,7 +9,7 @@ public class Dino.PeerState : Object {
public signal void connection_ready();
public signal void session_terminated(bool we_terminated, string? reason_name, string? reason_text);
public signal void encryption_updated(Xep.Jingle.ContentEncryption? audio_encryption, Xep.Jingle.ContentEncryption? video_encryption);
public signal void encryption_updated(Xep.Jingle.ContentEncryption? audio_encryption, Xep.Jingle.ContentEncryption? video_encryption, bool same);
public StreamInteractor stream_interactor;
public CallState call_state;
@ -45,10 +45,7 @@ public class Dino.PeerState : Object {
this.stream_interactor = stream_interactor;
this.calls = stream_interactor.get_module(Calls.IDENTITY);
Xep.JingleRtp.Module jinglertp_module = stream_interactor.module_manager.get_module(call.account, Xep.JingleRtp.Module.IDENTITY);
if (jinglertp_module == null) return;
var session_info_type = jinglertp_module.session_info_type;
var session_info_type = stream_interactor.module_manager.get_module(call.account, Xep.JingleRtp.Module.IDENTITY).session_info_type;
session_info_type.mute_update_received.connect((session,mute, name) => {
if (this.sid != session.sid) return;
@ -412,7 +409,7 @@ public class Dino.PeerState : Object {
if ((audio_encryptions != null && audio_encryptions.is_empty) || (video_encryptions != null && video_encryptions.is_empty)) {
call.encryption = Encryption.NONE;
encryption_updated(null, null);
encryption_updated(null, null, true);
return;
}
@ -462,7 +459,7 @@ public class Dino.PeerState : Object {
encryption_keys_same = true;
}
encryption_updated(audio_encryption, video_encryption);
encryption_updated(audio_encryption, video_encryption, encryption_keys_same);
}
}

View File

@ -18,7 +18,6 @@ public class Dino.CallState : Object {
public bool use_cim = false;
public string? cim_call_id = null;
public Jid? cim_counterpart = null;
public ArrayList<Jid> cim_jids_to_inform = new ArrayList<Jid>();
public string cim_message_type { get; set; default=Xmpp.MessageStanza.TYPE_CHAT; }
public Xep.Muji.GroupCall? group_call { get; set; }
@ -50,7 +49,7 @@ public class Dino.CallState : Object {
}
internal async void initiate_groupchat_call(Jid muc) {
cim_jids_to_inform.add(muc);
parent_muc = muc;
cim_message_type = MessageStanza.TYPE_GROUPCHAT;
if (this.group_call == null) yield convert_into_group_call();
@ -98,27 +97,29 @@ public class Dino.CallState : Object {
accepted = true;
call.state = Call.State.ESTABLISHING;
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
if (use_cim) {
if (invited_to_group_call != null) {
join_group_call.begin(invited_to_group_call);
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_muji_accept(stream, jid_to_inform, cim_call_id, invited_to_group_call, cim_message_type);
}
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
StanzaNode? inner_node = null;
if (group_call != null) {
inner_node = new StanzaNode.build("muji", Xep.Muji.NS_URI).add_self_xmlns()
.put_attribute("room", group_call.muc_jid.to_string());
} else if (peers.size == 1) {
string sid = peers.values.to_array()[0].sid;
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_jingle_accept(stream, jid_to_inform, cim_call_id, sid, cim_message_type);
foreach (PeerState peer in peers.values) {
inner_node = new StanzaNode.build("jingle", Xep.CallInvites.NS_URI)
.put_attribute("sid", peer.sid);
}
}
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_accept(stream, cim_counterpart, cim_call_id, inner_node, cim_message_type);
} else {
foreach (PeerState peer in peers.values) {
peer.accept();
}
}
if (invited_to_group_call != null) {
join_group_call.begin(invited_to_group_call);
}
}
public void reject() {
@ -127,10 +128,7 @@ public class Dino.CallState : Object {
if (use_cim) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_reject(stream, jid_to_inform, cim_call_id, cim_message_type);
}
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_reject(stream, cim_counterpart, cim_call_id, cim_message_type);
}
var peers_cpy = new ArrayList<PeerState>();
peers_cpy.add_all(peers.values);
@ -144,38 +142,32 @@ public class Dino.CallState : Object {
var peers_cpy = new ArrayList<PeerState>();
peers_cpy.add_all(peers.values);
// Terminate sessions, send out messages about the ended call, exit MUC if applicable
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream != null) {
if (group_call != null) {
if (group_call != null) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream != null) {
stream.get_module(Xep.Muc.Module.IDENTITY).exit(stream, group_call.muc_jid);
}
if (call.state == Call.State.IN_PROGRESS || call.state == Call.State.ESTABLISHING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.SUCCESS, reason_text);
}
if (use_cim) {
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_left(stream, jid_to_inform, cim_call_id, cim_message_type);
}
}
} else if (call.state == Call.State.RINGING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.CANCEL, reason_text);
}
if (call.direction == Call.DIRECTION_OUTGOING && use_cim) {
foreach (Jid jid_to_inform in cim_jids_to_inform) {
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_retract(stream, jid_to_inform, cim_call_id, cim_message_type);
}
}
}
}
// Update the call state
if (call.state == Call.State.IN_PROGRESS || call.state == Call.State.ESTABLISHING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.SUCCESS, reason_text);
}
if (use_cim) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_finish(stream, cim_counterpart, cim_call_id, cim_message_type);
}
call.state = Call.State.ENDED;
} else if (call.state == Call.State.RINGING) {
foreach (PeerState peer in peers_cpy) {
peer.end(Xep.Jingle.ReasonElement.CANCEL, reason_text);
}
if (call.direction == Call.DIRECTION_OUTGOING && use_cim) {
XmppStream stream = stream_interactor.get_stream(call.account);
if (stream == null) return;
stream.get_module(Xep.CallInvites.Module.IDENTITY).send_retract(stream, cim_counterpart, cim_call_id, cim_message_type);
}
call.state = Call.State.MISSED;
} else {
return;

View File

@ -61,6 +61,8 @@ namespace Dino {
call_state.initiate_groupchat_call.begin(conversation.counterpart);
}
conversation.last_active = call.time;
call_outgoing(call, call_state, conversation);
return call_state;
@ -70,16 +72,17 @@ namespace Dino {
Plugins.VideoCallPlugin? plugin = Application.get_default().plugin_registry.video_call_plugin;
if (plugin == null) return false;
return plugin.supported();
return plugin.supports(null);
}
public async bool can_conversation_do_calls(Conversation conversation) {
if (!can_we_do_calls(conversation.account)) return false;
if (conversation.type_ == Conversation.Type.CHAT) {
return !conversation.counterpart.equals_bare(conversation.account.bare_jid);
return (yield get_call_resources(conversation.account, conversation.counterpart)).size > 0 || has_jmi_resources(conversation.counterpart);
} else {
bool is_private = stream_interactor.get_module(MucManager.IDENTITY).is_private_room(conversation.account, conversation.counterpart);
EntityInfo entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
return is_private && can_initiate_groupcall(conversation.account);
}
}
@ -219,6 +222,7 @@ namespace Dino {
Conversation conversation = stream_interactor.get_module(ConversationManager.IDENTITY).create_conversation(call.counterpart.bare_jid, account, Conversation.Type.CHAT);
stream_interactor.get_module(CallStore.IDENTITY).add_call(call, conversation);
conversation.last_active = call.time;
var call_state = new CallState(call, stream_interactor);
connect_call_state_signals(call_state);
@ -291,12 +295,12 @@ namespace Dino {
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(inviter_jid.bare_jid, account);
if (conversation == null) return null;
stream_interactor.get_module(CallStore.IDENTITY).add_call(call, conversation);
conversation.last_active = call.time;
CallState call_state = new CallState(call, stream_interactor);
connect_call_state_signals(call_state);
call_state.invited_to_group_call = muc_jid;
call_state.use_cim = true;
call_state.cim_jids_to_inform.add(inviter_jid.bare_jid);
call_state.parent_muc = inviter_jid.bare_jid;
debug("[%s] on_muji_call_received accepting", account.bare_jid.to_string());
@ -457,10 +461,11 @@ namespace Dino {
call_state.use_cim = true;
call_state.cim_call_id = call_id;
call_state.cim_jids_to_inform.add(message_stanza.type_ == MessageStanza.TYPE_GROUPCHAT ? from_jid.bare_jid : from_jid);
call_state.cim_counterpart = message_stanza.type_ == MessageStanza.TYPE_GROUPCHAT ? from_jid.bare_jid : from_jid;
call_state.cim_message_type = message_stanza.type_;
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).approx_conversation_for_stanza(from_jid, to_jid, account, message_stanza.type_);
conversation.last_active = call_state.call.time;
if (conversation == null) return;
if (call_state.call.direction == Call.DIRECTION_INCOMING) {

View File

@ -188,7 +188,7 @@ public class ChatInteraction : StreamInteractionModule, Object {
}
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
if (Xmpp.MessageArchiveManagement.MessageFlag.get_flag(stanza) != null) return false;
if (Xep.MessageArchiveManagement.MessageFlag.get_flag(stanza) != null) return false;
ChatInteraction outer = stream_interactor.get_module(ChatInteraction.IDENTITY);
outer.send_delivery_receipt(message, stanza, conversation);

View File

@ -114,7 +114,7 @@ public class ConnectionManager : Object {
Timeout.add_seconds(60, () => {
foreach (Account account in connections.keys) {
if (connections[account].last_activity == null ||
if (connections[account].last_activity != null &&
connections[account].last_activity.compare(new DateTime.now_utc().add_minutes(-1)) < 0) {
check_reconnect(account);
}
@ -179,12 +179,13 @@ public class ConnectionManager : Object {
}
}
private async void connect_stream(Account account) {
private async void connect_stream(Account account, string? resource = null) {
if (!connections.has_key(account)) return;
debug("[%s] (Maybe) Establishing a new connection", account.bare_jid.to_string());
connection_errors.unset(account);
if (resource == null) resource = account.resourcepart;
XmppStreamResult stream_result;
@ -200,7 +201,7 @@ public class ConnectionManager : Object {
connection_directly_retry[account] = false;
change_connection_state(account, ConnectionState.CONNECTING);
stream_result = yield Xmpp.establish_stream(account.bare_jid, module_manager.get_modules(account), log_options,
stream_result = yield Xmpp.establish_stream(account.bare_jid, module_manager.get_modules(account, resource), log_options,
(peer_cert, errors) => { return on_invalid_certificate(account.domainpart, peer_cert, errors); }
);
connections[account].stream = stream_result.stream;
@ -225,7 +226,7 @@ public class ConnectionManager : Object {
XmppStream stream = stream_result.stream;
debug("[%s] New connection: %p", account.full_jid.to_string(), stream);
debug("[%s] New connection with resource %s: %p", account.bare_jid.to_string(), resource, stream);
connections[account].established = new DateTime.now_utc();
stream.attached_modules.connect((stream) => {
@ -254,7 +255,6 @@ public class ConnectionManager : Object {
debug("[%s %p] Connection error: %s", account.bare_jid.to_string(), stream, e.message);
change_connection_state(account, ConnectionState.DISCONNECTED);
if (!connections.has_key(account)) return;
connections[account].reset();
StreamError.Flag? flag = stream.get_flag(StreamError.Flag.IDENTITY);
@ -263,8 +263,7 @@ public class ConnectionManager : Object {
set_connection_error(account, new ConnectionError(ConnectionError.Source.STREAM_ERROR, flag.error_type));
if (flag.resource_rejected) {
account.set_random_resource();
connect_stream.begin(account);
connect_stream.begin(account, account.resourcepart + "-" + random_uuid());
return;
}
}

View File

@ -1,58 +0,0 @@
using Xmpp;
using Gee;
using Qlite;
using Dino.Entities;
// Observable holder for a conversation's display name. Being a GObject
// property, UI can bind to display_name and react to updates.
public class Dino.Model.ConversationDisplayName : Object {
    public string display_name { get; set; }
}
namespace Dino {
// Stream-interaction module that hands out ConversationDisplayName models and
// keeps them updated when MUC room info, occupants, subjects, or roster items
// change.
public class ContactModels : StreamInteractionModule, Object {
    public static ModuleIdentity<ContactModels> IDENTITY = new ModuleIdentity<ContactModels>("contact_models");
    public string id { get { return IDENTITY.id; } }
    private StreamInteractor stream_interactor;
    // One model per conversation; only conversations in this map get updates.
    private HashMap<Conversation, Model.ConversationDisplayName> conversation_models = new HashMap<Conversation, Model.ConversationDisplayName>(Conversation.hash_func, Conversation.equals_func);
    // Creates the module and registers it with the stream interactor.
    public static void start(StreamInteractor stream_interactor) {
        ContactModels m = new ContactModels(stream_interactor);
        stream_interactor.add_module(m);
    }
    private ContactModels(StreamInteractor stream_interactor) {
        this.stream_interactor = stream_interactor;
        // Any event that can change a display name triggers a model refresh.
        stream_interactor.get_module(MucManager.IDENTITY).room_info_updated.connect((account, jid) => {
            check_update_models(account, jid, Conversation.Type.GROUPCHAT);
        });
        stream_interactor.get_module(MucManager.IDENTITY).private_room_occupant_updated.connect((account, room, occupant) => {
            check_update_models(account, room, Conversation.Type.GROUPCHAT);
        });
        stream_interactor.get_module(MucManager.IDENTITY).subject_set.connect((account, jid, subject) => {
            check_update_models(account, jid, Conversation.Type.GROUPCHAT);
        });
        stream_interactor.get_module(RosterManager.IDENTITY).updated_roster_item.connect((account, jid, roster_item) => {
            check_update_models(account, jid, Conversation.Type.CHAT);
        });
    }
    // Recomputes the display name for an existing model; no-op when the
    // conversation is unknown or no model has been handed out for it.
    private void check_update_models(Account account, Jid jid, Conversation.Type conversation_ty) {
        var conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(jid, account, conversation_ty);
        if (conversation == null) return;
        var display_name_model = conversation_models[conversation];
        if (display_name_model == null) return;
        display_name_model.display_name = Dino.get_conversation_display_name(stream_interactor, conversation, "%s (%s)");
    }
    // Returns the (cached or newly created) display-name model for a conversation.
    public Model.ConversationDisplayName get_display_name_model(Conversation conversation) {
        if (conversation_models.has_key(conversation)) return conversation_models[conversation];
        var model = new Model.ConversationDisplayName();
        model.display_name = Dino.get_conversation_display_name(stream_interactor, conversation, "%s (%s)");
        conversation_models[conversation] = model;
        return model;
    }
}
}

View File

@ -40,12 +40,41 @@ public class ContentItemStore : StreamInteractionModule, Object {
collection_conversations.unset(conversation);
}
private Gee.List<ContentItem> get_items_from_query(QueryBuilder select, Conversation conversation) {
public Gee.List<ContentItem> get_items_from_query(QueryBuilder select, Conversation conversation) {
Gee.TreeSet<ContentItem> items = new Gee.TreeSet<ContentItem>(ContentItem.compare_func);
foreach (var row in select) {
ContentItem content_item = get_item_from_row(row, conversation);
items.add(content_item);
int provider = row[db.content_item.content_type];
int foreign_id = row[db.content_item.foreign_id];
DateTime time = new DateTime.from_unix_utc(row[db.content_item.time]);
switch (provider) {
case 1:
Message? message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_id(foreign_id, conversation);
if (message != null) {
var message_item = new MessageItem(message, conversation, row[db.content_item.id]);
message_item.time = time; // In case of message corrections, the original time should be used
items.add(message_item);
}
break;
case 2:
FileTransfer? file_transfer = stream_interactor.get_module(FileTransferStorage.IDENTITY).get_file_by_id(foreign_id, conversation);
if (file_transfer != null) {
Message? message = null;
if (file_transfer.provider == 0 && file_transfer.info != null) {
message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_id(int.parse(file_transfer.info), conversation);
}
var file_item = new FileItem(file_transfer, conversation, row[db.content_item.id], message);
items.add(file_item);
}
break;
case 3:
Call? call = stream_interactor.get_module(CallStore.IDENTITY).get_call_by_id(foreign_id, conversation);
if (call != null) {
var call_item = new CallItem(call, conversation, row[db.content_item.id]);
items.add(call_item);
}
break;
}
}
Gee.List<ContentItem> ret = new ArrayList<ContentItem>();
@ -55,50 +84,7 @@ public class ContentItemStore : StreamInteractionModule, Object {
return ret;
}
private ContentItem get_item_from_row(Row row, Conversation conversation) throws Error {
int id = row[db.content_item.id];
int content_type = row[db.content_item.content_type];
int foreign_id = row[db.content_item.foreign_id];
DateTime time = new DateTime.from_unix_utc(row[db.content_item.time]);
return get_item(conversation, id, content_type, foreign_id, time);
}
private ContentItem get_item(Conversation conversation, int id, int content_type, int foreign_id, DateTime time) throws Error {
switch (content_type) {
case 1:
Message? message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_id(foreign_id, conversation);
if (message != null) {
var message_item = new MessageItem(message, conversation, id);
message_item.time = time; // In case of message corrections, the original time should be used
return message_item;
}
break;
case 2:
FileTransfer? file_transfer = stream_interactor.get_module(FileTransferStorage.IDENTITY).get_file_by_id(foreign_id, conversation);
if (file_transfer != null) {
Message? message = null;
if (file_transfer.provider == 0 && file_transfer.info != null) {
message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_id(int.parse(file_transfer.info), conversation);
}
var file_item = new FileItem(file_transfer, conversation, id, message);
return file_item;
}
break;
case 3:
Call? call = stream_interactor.get_module(CallStore.IDENTITY).get_call_by_id(foreign_id, conversation);
if (call != null) {
var call_item = new CallItem(call, conversation, id);
return call_item;
}
break;
default:
warning("Unknown content item type: %i", content_type);
break;
}
throw new Error(-1, 0, "Bad content type %i or non existing content item %i", content_type, foreign_id);
}
public ContentItem? get_item_by_foreign(Conversation conversation, int type, int foreign_id) {
public ContentItem? get_item(Conversation conversation, int type, int foreign_id) {
QueryBuilder select = db.content_item.select()
.with(db.content_item.content_type, "=", type)
.with(db.content_item.foreign_id, "=", foreign_id);
@ -117,85 +103,6 @@ public class ContentItemStore : StreamInteractionModule, Object {
return item.size > 0 ? item[0] : null;
}
public string? get_message_id_for_content_item(Conversation conversation, ContentItem content_item) {
Message? message = get_message_for_content_item(conversation, content_item);
if (message == null) return null;
return MessageStorage.get_reference_id(message);
}
public Jid? get_message_sender_for_content_item(Conversation conversation, ContentItem content_item) {
Message? message = get_message_for_content_item(conversation, content_item);
if (message == null) return null;
// No need to look at edit_to, because it's the same sender JID.
return message.from;
}
public Message? get_message_for_content_item(Conversation conversation, ContentItem content_item) {
FileItem? file_item = content_item as FileItem;
if (file_item != null) {
if (file_item.file_transfer.provider != 0 || file_item.file_transfer.info == null) return null;
int message_db_id = int.parse(file_item.file_transfer.info);
return stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_id(message_db_id, conversation);
}
MessageItem? message_item = content_item as MessageItem;
if (message_item != null) {
return message_item.message;
}
return null;
}
public ContentItem? get_content_item_for_message_id(Conversation conversation, string message_id) {
Row? row = get_content_item_row_for_message_id(conversation, message_id);
if (row != null) {
return get_item_from_row(row, conversation);
}
return null;
}
public int get_content_item_id_for_message_id(Conversation conversation, string message_id) {
Row? row = get_content_item_row_for_message_id(conversation, message_id);
if (row != null) {
return row[db.content_item.id];
}
return -1;
}
private Row? get_content_item_row_for_message_id(Conversation conversation, string message_id) {
var content_item_row = db.content_item.select();
Message? message = null;
if (conversation.type_ == Conversation.Type.CHAT) {
message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_stanza_id(message_id, conversation);
} else {
message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_server_id(message_id, conversation);
}
if (message == null) return null;
RowOption file_transfer_row = db.file_transfer.select()
.with(db.file_transfer.account_id, "=", conversation.account.id)
.with(db.file_transfer.counterpart_id, "=", db.get_jid_id(conversation.counterpart))
.with(db.file_transfer.info, "=", message.id.to_string())
.order_by(db.file_transfer.time, "DESC")
.single().row();
if (file_transfer_row.is_present()) {
content_item_row.with(db.content_item.foreign_id, "=", file_transfer_row[db.file_transfer.id])
.with(db.content_item.content_type, "=", 2);
} else {
content_item_row.with(db.content_item.foreign_id, "=", message.id)
.with(db.content_item.content_type, "=", 1);
}
RowOption content_item_row_option = content_item_row.single().row();
if (content_item_row_option.is_present()) {
return content_item_row_option.inner;
}
return null;
}
public ContentItem? get_latest(Conversation conversation) {
Gee.List<ContentItem> items = get_n_latest(conversation, 1);
if (items.size > 0) {
@ -215,26 +122,6 @@ public class ContentItemStore : StreamInteractionModule, Object {
return get_items_from_query(select, conversation);
}
// public Gee.List<ContentItemMeta> get_latest_meta(Conversation conversation, int count) {
// QueryBuilder select = db.content_item.select()
// .with(db.content_item.conversation_id, "=", conversation.id)
// .with(db.content_item.hide, "=", false)
// .order_by(db.content_item.time, "DESC")
// .order_by(db.content_item.id, "DESC")
// .limit(count);
//
// var ret = new ArrayList<ContentItemMeta>();
// foreach (var row in select) {
// var item_meta = new ContentItemMeta() {
// id = row[db.content_item.id],
// content_type = row[db.content_item.content_type],
// foreign_id = row[db.content_item.foreign_id],
// time = new DateTime.from_unix_utc(row[db.content_item.time])
// };
// }
// return ret;
// }
public Gee.List<ContentItem> get_before(Conversation conversation, ContentItem item, int count) {
long time = (long) item.time.to_unix();
QueryBuilder select = db.content_item.select()

View File

@ -29,8 +29,6 @@ public class ConversationManager : StreamInteractionModule, Object {
stream_interactor.account_removed.connect(on_account_removed);
stream_interactor.get_module(MessageProcessor.IDENTITY).received_pipeline.connect(new MessageListener(stream_interactor));
stream_interactor.get_module(MessageProcessor.IDENTITY).message_sent.connect(handle_sent_message);
stream_interactor.get_module(Calls.IDENTITY).call_incoming.connect(handle_new_call);
stream_interactor.get_module(Calls.IDENTITY).call_outgoing.connect(handle_new_call);
}
public Conversation create_conversation(Jid jid, Account account, Conversation.Type? type = null) {
@ -48,28 +46,18 @@ public class ConversationManager : StreamInteractionModule, Object {
// Create a new converation
Conversation conversation = new Conversation(jid, account, type);
// Set encryption for conversation
if (type == Conversation.Type.CHAT ||
(type == Conversation.Type.GROUPCHAT && stream_interactor.get_module(MucManager.IDENTITY).is_private_room(account, jid))) {
conversation.encryption = Application.get_default().settings.get_default_encryption(account);
} else {
conversation.encryption = Encryption.NONE;
}
add_conversation(conversation);
conversation.persist(db);
return conversation;
}
public Conversation? get_conversation_for_message(Entities.Message message) {
if (conversations.has_key(message.account)) {
if (message.type_ == Entities.Message.Type.CHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.CHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.GROUPCHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT_PM) {
return create_conversation(message.counterpart, message.account, Conversation.Type.GROUPCHAT_PM);
}
if (message.type_ == Entities.Message.Type.CHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.CHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT) {
return create_conversation(message.counterpart.bare_jid, message.account, Conversation.Type.GROUPCHAT);
} else if (message.type_ == Entities.Message.Type.GROUPCHAT_PM) {
return create_conversation(message.counterpart, message.account, Conversation.Type.GROUPCHAT_PM);
}
return null;
}
@ -188,7 +176,7 @@ public class ConversationManager : StreamInteractionModule, Object {
conversation.last_active = message.time;
if (stanza != null) {
bool is_mam_message = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(stanza) != null;
bool is_mam_message = Xep.MessageArchiveManagement.MessageFlag.get_flag(stanza) != null;
bool is_recent = message.time.compare(new DateTime.now_utc().add_days(-3)) > 0;
if (is_mam_message && !is_recent) return false;
}
@ -206,11 +194,6 @@ public class ConversationManager : StreamInteractionModule, Object {
}
}
private void handle_new_call(Call call, CallState state, Conversation conversation) {
conversation.last_active = call.time;
start_conversation(conversation);
}
private void add_conversation(Conversation conversation) {
if (!conversations[conversation.account].has_key(conversation.counterpart)) {
conversations[conversation.account][conversation.counterpart] = new ArrayList<Conversation>(Conversation.equals_func);

View File

@ -154,7 +154,7 @@ public class CounterpartInteractionManager : StreamInteractionModule, Object {
conversation.read_up_to = message;
// TODO: This only marks messages as read, not http file transfers.
ContentItem? content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_foreign(conversation, 1, message.id);
ContentItem? content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item(conversation, 1, message.id);
if (content_item == null) return;
ContentItem? read_up_to_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_id(conversation, conversation.read_up_to_item);
if (read_up_to_item != null && read_up_to_item.compare(content_item) > 0) return;

View File

@ -7,7 +7,7 @@ using Dino.Entities;
namespace Dino {
public class Database : Qlite.Database {
private const int VERSION = 29;
private const int VERSION = 22;
public class AccountTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
@ -17,7 +17,6 @@ public class Database : Qlite.Database {
public Column<string> alias = new Column.Text("alias");
public Column<bool> enabled = new Column.BoolInt("enabled");
public Column<string> roster_version = new Column.Text("roster_version") { min_version=2 };
// no longer used. all usages already removed. remove db column at some point.
public Column<long> mam_earliest_synced = new Column.Long("mam_earliest_synced") { min_version=4 };
internal AccountTable(Database db) {
@ -94,29 +93,10 @@ public class Database : Qlite.Database {
// deduplication
index("message_account_counterpart_stanzaid_idx", {account_id, counterpart_id, stanza_id});
index("message_account_counterpart_serverid_idx", {account_id, counterpart_id, server_id});
// message by marked
index("message_account_marked_idx", {account_id, marked});
fts({body});
}
}
public class BodyMeta : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> message_id = new Column.Integer("message_id");
public Column<int> from_char = new Column.Integer("from_char");
public Column<int> to_char = new Column.Integer("to_char");
public Column<string> info_type = new Column.Text("info_type");
public Column<string> info = new Column.Text("info");
internal BodyMeta(Database db) {
base(db, "body_meta");
init({id, message_id, from_char, to_char, info_type, info});
}
}
public class MessageCorrectionTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> message_id = new Column.Integer("message_id") { unique=true };
@ -129,20 +109,6 @@ public class Database : Qlite.Database {
}
}
public class ReplyTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> message_id = new Column.Integer("message_id") { not_null = true, unique=true };
public Column<int> quoted_content_item_id = new Column.Integer("quoted_message_id");
public Column<string?> quoted_message_stanza_id = new Column.Text("quoted_message_stanza_id");
public Column<string?> quoted_message_from = new Column.Text("quoted_message_from");
internal ReplyTable(Database db) {
base(db, "reply");
init({id, message_id, quoted_content_item_id, quoted_message_stanza_id, quoted_message_from});
index("reply_quoted_message_stanza_id", {quoted_message_stanza_id});
}
}
public class RealJidTable : Table {
public Column<int> message_id = new Column.Integer("message_id") { primary_key = true };
public Column<string> real_jid = new Column.Text("real_jid");
@ -153,20 +119,6 @@ public class Database : Qlite.Database {
}
}
public class OccupantIdTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true };
public Column<int> account_id = new Column.Integer("account_id") { not_null = true };
public Column<string> last_nick = new Column.Text("last_nick");
public Column<int> jid_id = new Column.Integer("jid_id");
public Column<string> occupant_id = new Column.Text("occupant_id");
internal OccupantIdTable(Database db) {
base(db, "occupant_id");
init({id, account_id, last_nick, jid_id, occupant_id});
unique({account_id, jid_id, occupant_id}, "REPLACE");
}
}
public class UndecryptedTable : Table {
public Column<int> message_id = new Column.Integer("message_id");
public Column<int> type_ = new Column.Integer("type");
@ -180,7 +132,6 @@ public class Database : Qlite.Database {
public class FileTransferTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<string> file_sharing_id = new Column.Text("file_sharing_id") { min_version=28 };
public Column<int> account_id = new Column.Integer("account_id") { not_null = true };
public Column<int> counterpart_id = new Column.Integer("counterpart_id") { not_null = true };
public Column<string> counterpart_resource = new Column.Text("counterpart_resource");
@ -192,58 +143,15 @@ public class Database : Qlite.Database {
public Column<string> file_name = new Column.Text("file_name");
public Column<string> path = new Column.Text("path");
public Column<string> mime_type = new Column.Text("mime_type");
public Column<long> size = new Column.Long("size");
public Column<int> size = new Column.Integer("size");
public Column<int> state = new Column.Integer("state");
public Column<int> provider = new Column.Integer("provider");
public Column<string> info = new Column.Text("info");
public Column<long> modification_date = new Column.Long("modification_date") { default = "-1", min_version=28 };
public Column<int> width = new Column.Integer("width") { default = "-1", min_version=28 };
public Column<int> height = new Column.Integer("height") { default = "-1", min_version=28 };
public Column<long> length = new Column.Integer("length") { default = "-1", min_version=28 };
internal FileTransferTable(Database db) {
base(db, "file_transfer");
init({id, file_sharing_id, account_id, counterpart_id, counterpart_resource, our_resource, direction,
time, local_time, encryption, file_name, path, mime_type, size, state, provider, info, modification_date,
width, height, length});
}
}
public class FileHashesTable : Table {
public Column<int> id = new Column.Integer("id");
public Column<string> algo = new Column.Text("algo") { not_null = true };
public Column<string> value = new Column.Text("value") { not_null = true };
internal FileHashesTable(Database db) {
base(db, "file_hashes");
init({id, algo, value});
unique({id, algo}, "REPLACE");
}
}
public class FileThumbnailsTable : Table {
public Column<int> id = new Column.Integer("id");
// TODO store data as bytes, not as data uri
public Column<string> uri = new Column.Text("uri") { not_null = true };
public Column<string> mime_type = new Column.Text("mime_type");
public Column<int> width = new Column.Integer("width");
public Column<int> height = new Column.Integer("height");
internal FileThumbnailsTable(Database db) {
base(db, "file_thumbnails");
init({id, uri, mime_type, width, height});
}
}
public class SourcesTable : Table {
public Column<int> file_transfer_id = new Column.Integer("file_transfer_id");
public Column<string> type = new Column.Text("type") { not_null = true };
public Column<string> data = new Column.Text("data") { not_null = true };
internal SourcesTable(Database db) {
base(db, "sfs_sources");
init({file_transfer_id, type, data});
index("sfs_sources_file_transfer_id_idx", {file_transfer_id});
init({id, account_id, counterpart_id, counterpart_resource, our_resource, direction, time, local_time,
encryption, file_name, path, mime_type, size, state, provider, info});
}
}
@ -285,7 +193,6 @@ public class Database : Qlite.Database {
public Column<int> jid_id = new Column.Integer("jid_id") { not_null = true };
public Column<string> resource = new Column.Text("resource") { min_version=1 };
public Column<bool> active = new Column.BoolInt("active");
public Column<long> active_last_changed = new Column.Integer("active_last_changed") { not_null=true, default="0", min_version=23 };
public Column<long> last_active = new Column.Long("last_active");
public Column<int> type_ = new Column.Integer("type");
public Column<int> encryption = new Column.Integer("encryption");
@ -294,11 +201,10 @@ public class Database : Qlite.Database {
public Column<int> notification = new Column.Integer("notification") { min_version=3 };
public Column<int> send_typing = new Column.Integer("send_typing") { min_version=3 };
public Column<int> send_marker = new Column.Integer("send_marker") { min_version=3 };
public Column<int> pinned = new Column.Integer("pinned") { default="0", min_version=25 };
internal ConversationTable(Database db) {
base(db, "conversation");
init({id, account_id, jid_id, resource, active, active_last_changed, last_active, type_, encryption, read_up_to, read_up_to_item, notification, send_typing, send_marker, pinned});
init({id, account_id, jid_id, resource, active, last_active, type_, encryption, read_up_to, read_up_to_item, notification, send_typing, send_marker});
}
}
@ -346,11 +252,10 @@ public class Database : Qlite.Database {
public Column<string> jid = new Column.Text("jid");
public Column<string> handle = new Column.Text("name");
public Column<string> subscription = new Column.Text("subscription");
public Column<string> ask = new Column.Text("ask") { min_version=29 };
internal RosterTable(Database db) {
base(db, "roster");
init({account_id, jid, handle, subscription, ask});
init({account_id, jid, handle, subscription});
unique({account_id, jid}, "IGNORE");
}
}
@ -358,33 +263,15 @@ public class Database : Qlite.Database {
public class MamCatchupTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> account_id = new Column.Integer("account_id") { not_null = true };
public Column<string> server_jid = new Column.Text("server_jid") { not_null = true };
public Column<string> from_id = new Column.Text("from_id") { not_null = true };
public Column<bool> from_end = new Column.BoolInt("from_end");
public Column<string> from_id = new Column.Text("from_id");
public Column<long> from_time = new Column.Long("from_time") { not_null = true };
public Column<bool> from_end = new Column.BoolInt("from_end") { not_null = true };
public Column<string> to_id = new Column.Text("to_id") { not_null = true };
public Column<string> to_id = new Column.Text("to_id");
public Column<long> to_time = new Column.Long("to_time") { not_null = true };
internal MamCatchupTable(Database db) {
base(db, "mam_catchup");
init({id, account_id, server_jid, from_end, from_id, from_time, to_id, to_time});
}
}
public class ReactionTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> account_id = new Column.Integer("account_id") { not_null = true };
public Column<int> occupant_id = new Column.Integer("occupant_id");
public Column<int> content_item_id = new Column.Integer("content_item_id") { not_null = true };
public Column<long> time = new Column.Long("time") { not_null = true };
public Column<int> jid_id = new Column.Integer("jid_id");
public Column<string> emojis = new Column.Text("emojis");
internal ReactionTable(Database db) {
base(db, "reaction");
init({id, account_id, occupant_id, content_item_id, time, jid_id, emojis});
unique({account_id, content_item_id, jid_id}, "REPLACE");
unique({account_id, content_item_id, occupant_id}, "REPLACE");
init({id, account_id, from_end, from_id, from_time, to_id, to_time});
}
}
@ -399,29 +286,6 @@ public class Database : Qlite.Database {
}
}
public class AccountSettingsTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> account_id = new Column.Integer("account_id") { not_null = true };
public Column<string> key = new Column.Text("key") { not_null = true };
public Column<string> value = new Column.Text("value");
internal AccountSettingsTable(Database db) {
base(db, "account_settings");
init({id, account_id, key, value});
unique({account_id, key}, "REPLACE");
}
public string? get_value(int account_id, string key) {
var row_opt = select({value})
.with(this.account_id, "=", account_id)
.with(this.key, "=", key)
.single()
.row();
if (row_opt.is_present()) return row_opt[value];
return null;
}
}
public class ConversationSettingsTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
public Column<int> conversation_id = new Column.Integer("conversation_id") {not_null=true};
@ -440,15 +304,9 @@ public class Database : Qlite.Database {
public EntityTable entity { get; private set; }
public ContentItemTable content_item { get; private set; }
public MessageTable message { get; private set; }
public BodyMeta body_meta { get; private set; }
public ReplyTable reply { get; private set; }
public MessageCorrectionTable message_correction { get; private set; }
public RealJidTable real_jid { get; private set; }
public OccupantIdTable occupantid { get; private set; }
public FileTransferTable file_transfer { get; private set; }
public FileHashesTable file_hashes { get; private set; }
public FileThumbnailsTable file_thumbnails { get; private set; }
public SourcesTable sfs_sources { get; private set; }
public CallTable call { get; private set; }
public CallCounterpartTable call_counterpart { get; private set; }
public ConversationTable conversation { get; private set; }
@ -457,9 +315,7 @@ public class Database : Qlite.Database {
public EntityFeatureTable entity_feature { get; private set; }
public RosterTable roster { get; private set; }
public MamCatchupTable mam_catchup { get; private set; }
public ReactionTable reaction { get; private set; }
public SettingsTable settings { get; private set; }
public AccountSettingsTable account_settings { get; private set; }
public ConversationSettingsTable conversation_settings { get; private set; }
public Map<int, Jid> jid_table_cache = new HashMap<int, Jid>();
@ -473,15 +329,9 @@ public class Database : Qlite.Database {
entity = new EntityTable(this);
content_item = new ContentItemTable(this);
message = new MessageTable(this);
body_meta = new BodyMeta(this);
message_correction = new MessageCorrectionTable(this);
reply = new ReplyTable(this);
occupantid = new OccupantIdTable(this);
real_jid = new RealJidTable(this);
file_transfer = new FileTransferTable(this);
file_hashes = new FileHashesTable(this);
file_thumbnails = new FileThumbnailsTable(this);
sfs_sources = new SourcesTable(this);
call = new CallTable(this);
call_counterpart = new CallCounterpartTable(this);
conversation = new ConversationTable(this);
@ -490,11 +340,9 @@ public class Database : Qlite.Database {
entity_feature = new EntityFeatureTable(this);
roster = new RosterTable(this);
mam_catchup = new MamCatchupTable(this);
reaction = new ReactionTable(this);
settings = new SettingsTable(this);
account_settings = new AccountSettingsTable(this);
conversation_settings = new ConversationSettingsTable(this);
init({ account, jid, entity, content_item, message, body_meta, message_correction, reply, real_jid, occupantid, file_transfer, file_hashes, file_thumbnails, sfs_sources, call, call_counterpart, conversation, avatar, entity_identity, entity_feature, roster, mam_catchup, reaction, settings, account_settings, conversation_settings });
init({ account, jid, entity, content_item, message, message_correction, real_jid, file_transfer, call, call_counterpart, conversation, avatar, entity_identity, entity_feature, roster, mam_catchup, settings, conversation_settings });
try {
exec("PRAGMA journal_mode = WAL");
@ -626,25 +474,6 @@ public class Database : Qlite.Database {
// FROM call2");
// exec("DROP TABLE call2");
}
if (oldVersion < 23) {
try {
exec("ALTER TABLE mam_catchup RENAME TO mam_catchup2");
mam_catchup.create_table_at_version(VERSION);
exec("""INSERT INTO mam_catchup (id, account_id, server_jid, from_id, from_time, from_end, to_id, to_time)
SELECT mam_catchup2.id, account_id, bare_jid, ifnull(from_id, ""), from_time, ifnull(from_end, 0), ifnull(to_id, ""), to_time
FROM mam_catchup2 JOIN account ON mam_catchup2.account_id=account.id""");
exec("DROP TABLE mam_catchup2");
} catch (Error e) {
error("Failed to upgrade to database version 23 (mam_catchup): %s", e.message);
}
try {
long active_last_updated = (long) new DateTime.now_utc().to_unix();
exec(@"UPDATE conversation SET active_last_changed=$active_last_updated WHERE active_last_changed=0");
} catch (Error e) {
error("Failed to upgrade to database version 23 (conversation): %s", e.message);
}
}
}
public ArrayList<Account> get_accounts() {
@ -652,9 +481,6 @@ public class Database : Qlite.Database {
foreach(Row row in account.select()) {
try {
Account account = new Account.from_row(this, row);
if (account_table_cache.has_key(account.id)) {
account = account_table_cache[account.id];
}
ret.add(account);
account_table_cache[account.id] = account;
} catch (InvalidJidError e) {

View File

@ -79,48 +79,22 @@ public class EntityInfo : StreamInteractionModule, Object {
}
public async bool has_feature(Account account, Jid jid, string feature) {
int has_feature_cached = has_feature_cached_int(account, jid, feature);
if (has_feature_cached != -1) {
return has_feature_cached == 1;
}
ServiceDiscovery.InfoResult? info_result = yield get_info_result(account, jid, entity_caps_hashes[jid]);
if (info_result == null) return false;
return info_result.features.contains(feature);
}
public bool has_feature_offline(Account account, Jid jid, string feature) {
int ret = has_feature_cached_int(account, jid, feature);
if (ret == -1) {
return db.entity.select()
.with(db.entity.account_id, "=", account.id)
.with(db.entity.jid_id, "=", db.get_jid_id(jid))
.with(db.entity.resource, "=", jid.resourcepart ?? "")
.join_with(db.entity_feature, db.entity.caps_hash, db.entity_feature.entity)
.with(db.entity_feature.feature, "=", feature)
.count() > 0;
}
return ret == 1;
}
public bool has_feature_cached(Account account, Jid jid, string feature) {
return has_feature_cached_int(account, jid, feature) == 1;
}
private int has_feature_cached_int(Account account, Jid jid, string feature) {
if (jid_features.has_key(jid)) {
return jid_features[jid].contains(feature) ? 1 : 0;
return jid_features[jid].contains(feature);
}
string? hash = entity_caps_hashes[jid];
if (hash != null) {
Gee.List<string>? features = get_stored_features(hash);
if (features != null) {
return features.contains(feature) ? 1 : 0;
return features.contains(feature);
}
}
return -1;
ServiceDiscovery.InfoResult? info_result = yield get_info_result(account, jid, hash);
if (info_result == null) return false;
return info_result.features.contains(feature);
}
private void on_received_available_presence(Account account, Presence.Stanza presence) {
@ -217,24 +191,13 @@ public class EntityInfo : StreamInteractionModule, Object {
ServiceDiscovery.InfoResult? info_result = yield stream.get_module(ServiceDiscovery.Module.IDENTITY).request_info(stream, jid);
if (info_result == null) return null;
var computed_hash = EntityCapabilities.Module.compute_hash_for_info_result(info_result);
if (hash == null || computed_hash == hash) {
db.entity.upsert()
.value(db.entity.account_id, account.id, true)
.value(db.entity.jid_id, db.get_jid_id(jid), true)
.value(db.entity.resource, jid.resourcepart ?? "", true)
.value(db.entity.last_seen, (long)(new DateTime.now_local()).to_unix())
.value(db.entity.caps_hash, computed_hash)
.perform();
store_features(computed_hash, info_result.features);
store_identities(computed_hash, info_result.identities);
if (hash != null && EntityCapabilities.Module.compute_hash_for_info_result(info_result) == hash) {
store_features(hash, info_result.features);
store_identities(hash, info_result.identities);
} else {
warning("Claimed entity caps hash from %s doesn't match computed one", jid.to_string());
jid_features[jid] = info_result.features;
jid_identity[jid] = info_result.identities;
}
jid_features[jid] = info_result.features;
jid_identity[jid] = info_result.identities;
return info_result;
}

View File

@ -1,72 +0,0 @@
using Gee;
using Qlite;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
// Stream-interaction module that records XEP-0428 fallback indications on
// received messages so the UI can later strip quote/reply fallback text
// from message bodies.
public class Dino.FallbackBody : StreamInteractionModule, Object {
    public static ModuleIdentity<FallbackBody> IDENTITY = new ModuleIdentity<FallbackBody>("fallback-body");
    public string id { get { return IDENTITY.id; } }

    private StreamInteractor stream_interactor;
    private Database db;
    private ReceivedMessageListener received_message_listener;

    // Module entry point: create the module and register it with the interactor.
    public static void start(StreamInteractor stream_interactor, Database db) {
        FallbackBody m = new FallbackBody(stream_interactor, db);
        stream_interactor.add_module(m);
    }

    private FallbackBody(StreamInteractor stream_interactor, Database db) {
        this.stream_interactor = stream_interactor;
        this.db = db;
        this.received_message_listener = new ReceivedMessageListener(stream_interactor, db);
        // Hook into the incoming-message pipeline so fallbacks are extracted
        // for every received message.
        stream_interactor.get_module(MessageProcessor.IDENTITY).received_pipeline.connect(received_message_listener);
    }

    // Pipeline listener that runs after the message has been stored ("STORE")
    // and attaches any XEP-0428 fallback ranges to the message entity.
    private class ReceivedMessageListener : MessageListener {
        public string[] after_actions_const = new string[]{ "STORE" };
        public override string action_group { get { return "Quote"; } }
        public override string[] after_actions { get { return after_actions_const; } }

        private StreamInteractor stream_interactor;
        private Database db;

        public ReceivedMessageListener(StreamInteractor stream_interactor, Database db) {
            this.stream_interactor = stream_interactor;
            this.db = db;
        }

        // Always returns false so later pipeline stages still run.
        public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
            Gee.List<Xep.FallbackIndication.Fallback> fallbacks = Xep.FallbackIndication.get_fallbacks(stanza);
            if (fallbacks.is_empty) return false;

            // NOTE(review): this loop has no effect — `continue` is its only
            // statement, so non-Replies fallbacks are neither filtered out nor
            // handled. All fallbacks are stored below regardless.
            foreach (var fallback in fallbacks) {
                if (fallback.ns_uri != Xep.Replies.NS_URI) continue; // TODO what if it's not
            }
            message.set_fallbacks(fallbacks);
            return false;
        }
    }

    // Builds the quoted ("> ...") fallback text for a reply to `content_item`,
    // using the message body for messages and the file name for file transfers.
    public static string get_quoted_fallback_body(ContentItem content_item) {
        string fallback = "> ";
        if (content_item.type_ == MessageItem.TYPE) {
            Message? quoted_message = ((MessageItem) content_item).message;
            fallback += Dino.message_body_without_reply_fallback(quoted_message);
            // Prefix every subsequent line of the quoted body as well.
            fallback = fallback.replace("\n", "\n> ");
        } else if (content_item.type_ == FileItem.TYPE) {
            FileTransfer? quoted_file = ((FileItem) content_item).file_transfer;
            fallback += quoted_file.file_name;
        }
        fallback += "\n";
        return fallback;
    }
}

View File

@ -2,7 +2,6 @@ using Gdk;
using Gee;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
namespace Dino {
@ -20,12 +19,6 @@ public class FileManager : StreamInteractionModule, Object {
private Gee.List<FileEncryptor> file_encryptors = new ArrayList<FileEncryptor>();
private Gee.List<FileDecryptor> file_decryptors = new ArrayList<FileDecryptor>();
private Gee.List<FileProvider> file_providers = new ArrayList<FileProvider>();
private Gee.List<FileMetadataProvider> file_metadata_providers = new ArrayList<FileMetadataProvider>();
public StatelessFileSharing sfs {
owned get { return stream_interactor.get_module(StatelessFileSharing.IDENTITY); }
private set { }
}
public static void start(StreamInteractor stream_interactor, Database db) {
FileManager m = new FileManager(stream_interactor, db);
@ -43,24 +36,6 @@ public class FileManager : StreamInteractionModule, Object {
this.add_provider(new JingleFileProvider(stream_interactor));
this.add_sender(new JingleFileSender(stream_interactor));
this.add_metadata_provider(new GenericFileMetadataProvider());
this.add_metadata_provider(new ImageFileMetadataProvider());
}
public const int HTTP_PROVIDER_ID = 0;
public const int SFS_PROVIDER_ID = 2;
public FileProvider? select_file_provider(FileTransfer file_transfer) {
bool http_usable = file_transfer.provider == SFS_PROVIDER_ID;
foreach (FileProvider file_provider in this.file_providers) {
if (file_transfer.provider == file_provider.get_id()) {
return file_provider;
}
if (http_usable && file_provider.get_id() == HTTP_PROVIDER_ID) {
return file_provider;
}
}
return null;
}
public async HashMap<int, long> get_file_size_limits(Conversation conversation) {
@ -85,15 +60,11 @@ public class FileManager : StreamInteractionModule, Object {
file_transfer.local_time = new DateTime.now_utc();
file_transfer.encryption = conversation.encryption;
Xep.FileMetadataElement.FileMetadata metadata = new Xep.FileMetadataElement.FileMetadata();
foreach (FileMetadataProvider file_metadata_provider in this.file_metadata_providers) {
if (file_metadata_provider.supports_file(file)) {
yield file_metadata_provider.fill_metadata(file, metadata);
}
}
file_transfer.file_metadata = metadata;
try {
FileInfo file_info = file.query_info("*", FileQueryInfoFlags.NONE);
file_transfer.file_name = file_info.get_display_name();
file_transfer.mime_type = file_info.get_content_type();
file_transfer.size = (int)file_info.get_size();
file_transfer.input_stream = yield file.read_async();
yield save_file(file_transfer);
@ -148,20 +119,7 @@ public class FileManager : StreamInteractionModule, Object {
file_send_data = file_encryptor.preprocess_send_file(conversation, file_transfer, file_send_data, file_meta);
}
file_transfer.state = FileTransfer.State.IN_PROGRESS;
// Update current download progress in the FileTransfer
LimitInputStream? limit_stream = file_transfer.input_stream as LimitInputStream;
if (limit_stream == null) {
limit_stream = new LimitInputStream(file_transfer.input_stream, file_meta.size);
file_transfer.input_stream = limit_stream;
}
if (limit_stream != null) {
limit_stream.bind_property("retrieved-bytes", file_transfer, "transferred-bytes", BindingFlags.SYNC_CREATE);
}
yield file_sender.send_file(conversation, file_transfer, file_send_data, file_meta);
file_transfer.state = FileTransfer.State.COMPLETE;
} catch (Error e) {
warning("Send file error: %s", e.message);
@ -172,7 +130,12 @@ public class FileManager : StreamInteractionModule, Object {
public async void download_file(FileTransfer file_transfer) {
Conversation conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(file_transfer.counterpart.bare_jid, file_transfer.account);
FileProvider? file_provider = this.select_file_provider(file_transfer);
FileProvider? file_provider = null;
foreach (FileProvider fp in file_providers) {
if (file_transfer.provider == fp.get_id()) {
file_provider = fp;
}
}
yield download_file_internal(file_provider, file_transfer, conversation);
}
@ -211,10 +174,6 @@ public class FileManager : StreamInteractionModule, Object {
file_decryptors.add(decryptor);
}
public void add_metadata_provider(FileMetadataProvider file_metadata_provider) {
file_metadata_providers.add(file_metadata_provider);
}
public bool is_sender_trustworthy(FileTransfer file_transfer, Conversation conversation) {
if (file_transfer.direction == FileTransfer.DIRECTION_SENT) return true;
@ -252,11 +211,7 @@ public class FileManager : StreamInteractionModule, Object {
private async void download_file_internal(FileProvider file_provider, FileTransfer file_transfer, Conversation conversation) {
try {
// Get meta info
FileReceiveData? receive_data = file_provider.get_file_receive_data(file_transfer);
if (receive_data == null) {
warning("Don't have download data (yet)");
return;
}
FileReceiveData receive_data = file_provider.get_file_receive_data(file_transfer);
FileDecryptor? file_decryptor = null;
foreach (FileDecryptor decryptor in file_decryptors) {
if (decryptor.can_decrypt_file(conversation, file_transfer, receive_data)) {
@ -271,6 +226,9 @@ public class FileManager : StreamInteractionModule, Object {
FileMeta file_meta = yield get_file_meta(file_provider, file_transfer, conversation, receive_data);
InputStream? input_stream = null;
// Download and decrypt file
file_transfer.state = FileTransfer.State.IN_PROGRESS;
@ -278,76 +236,31 @@ public class FileManager : StreamInteractionModule, Object {
file_meta = file_decryptor.prepare_download_file(conversation, file_transfer, receive_data, file_meta);
}
InputStream download_input_stream = yield file_provider.download(file_transfer, receive_data, file_meta);
InputStream input_stream = download_input_stream;
input_stream = yield file_provider.download(file_transfer, receive_data, file_meta);
if (file_decryptor != null) {
input_stream = yield file_decryptor.decrypt_file(input_stream, conversation, file_transfer, receive_data);
}
// Update current download progress in the FileTransfer
LimitInputStream? limit_stream = download_input_stream as LimitInputStream;
if (limit_stream != null) {
limit_stream.bind_property("retrieved-bytes", file_transfer, "transferred-bytes", BindingFlags.SYNC_CREATE);
}
// Save file
string filename = Random.next_int().to_string("%x") + "_" + file_transfer.file_name;
File file = File.new_for_path(Path.build_filename(get_storage_dir(), filename));
// libsoup doesn't properly support splicing
OutputStream os = file.create(FileCreateFlags.REPLACE_DESTINATION);
uint8[] buffer = new uint8[1024];
ssize_t read;
while ((read = yield input_stream.read_async(buffer, Priority.LOW, file_transfer.cancellable)) > 0) {
buffer.length = (int) read;
yield os.write_async(buffer, Priority.LOW, file_transfer.cancellable);
buffer.length = 1024;
}
yield input_stream.close_async(Priority.LOW, file_transfer.cancellable);
yield os.close_async(Priority.LOW, file_transfer.cancellable);
// Verify the hash of the downloaded file, if it is known
var supported_hashes = Xep.CryptographicHashes.get_supported_hashes(file_transfer.hashes);
if (!supported_hashes.is_empty) {
var checksum_types = new ArrayList<ChecksumType>();
var hashes = new HashMap<ChecksumType, string>();
foreach (var hash in supported_hashes) {
var checksum_type = Xep.CryptographicHashes.hash_string_to_type(hash.algo);
checksum_types.add(checksum_type);
hashes[checksum_type] = hash.val;
}
var computed_hashes = yield compute_file_hashes(file, checksum_types);
foreach (var checksum_type in hashes.keys) {
if (hashes[checksum_type] != computed_hashes[checksum_type]) {
warning("Hash of downloaded file does not equal advertised hash, discarding: %s. %s should be %s, was %s",
file_transfer.file_name, checksum_type.to_string(), hashes[checksum_type], computed_hashes[checksum_type]);
FileUtils.remove(file.get_path());
file_transfer.state = FileTransfer.State.FAILED;
return;
}
}
}
yield os.splice_async(input_stream, OutputStreamSpliceFlags.CLOSE_SOURCE|OutputStreamSpliceFlags.CLOSE_TARGET);
file_transfer.path = file.get_basename();
file_transfer.input_stream = yield file.read_async();
FileInfo file_info = file_transfer.get_file().query_info("*", FileQueryInfoFlags.NONE);
file_transfer.mime_type = file_info.get_content_type();
file_transfer.state = FileTransfer.State.COMPLETE;
} catch (IOError.CANCELLED e) {
print("cancelled\n");
} catch (Error e) {
warning("Error downloading file: %s", e.message);
if (file_transfer.provider == 0 || file_transfer.provider == FileManager.SFS_PROVIDER_ID) {
file_transfer.state = FileTransfer.State.NOT_STARTED;
} else {
file_transfer.state = FileTransfer.State.FAILED;
}
file_transfer.state = FileTransfer.State.FAILED;
}
}
public FileTransfer create_file_transfer_from_provider_incoming(FileProvider file_provider, string info, Jid from, DateTime time, DateTime local_time, Conversation conversation, FileReceiveData receive_data, FileMeta file_meta) {
private async void handle_incoming_file(FileProvider file_provider, string info, Jid from, DateTime time, DateTime local_time, Conversation conversation, FileReceiveData receive_data, FileMeta file_meta) {
FileTransfer file_transfer = new FileTransfer();
file_transfer.account = conversation.account;
file_transfer.counterpart = file_transfer.direction == FileTransfer.DIRECTION_RECEIVED ? from : conversation.counterpart;
@ -355,13 +268,8 @@ public class FileManager : StreamInteractionModule, Object {
file_transfer.ourpart = stream_interactor.get_module(MucManager.IDENTITY).get_own_jid(conversation.counterpart, conversation.account) ?? conversation.account.bare_jid;
file_transfer.direction = from.equals(file_transfer.ourpart) ? FileTransfer.DIRECTION_SENT : FileTransfer.DIRECTION_RECEIVED;
} else {
if (from.equals_bare(conversation.account.bare_jid)) {
file_transfer.ourpart = from;
file_transfer.direction = FileTransfer.DIRECTION_SENT;
} else {
file_transfer.ourpart = conversation.account.full_jid;
file_transfer.direction = FileTransfer.DIRECTION_RECEIVED;
}
file_transfer.ourpart = conversation.account.full_jid;
file_transfer.direction = from.equals_bare(file_transfer.ourpart) ? FileTransfer.DIRECTION_SENT : FileTransfer.DIRECTION_RECEIVED;
}
file_transfer.time = time;
file_transfer.local_time = local_time;
@ -379,25 +287,19 @@ public class FileManager : StreamInteractionModule, Object {
}
}
return file_transfer;
}
private async void handle_incoming_file(FileProvider file_provider, string info, Jid from, DateTime time, DateTime local_time, Conversation conversation, FileReceiveData receive_data, FileMeta file_meta) {
FileTransfer file_transfer = create_file_transfer_from_provider_incoming(file_provider, info, from, time, local_time, conversation, receive_data, file_meta);
stream_interactor.get_module(FileTransferStorage.IDENTITY).add_file(file_transfer);
if (is_sender_trustworthy(file_transfer, conversation)) {
try {
yield get_file_meta(file_provider, file_transfer, conversation, receive_data);
if (file_transfer.size >= 0 && file_transfer.size < 5000000) {
yield download_file_internal(file_provider, file_transfer, conversation);
}
} catch (Error e) {
warning("Error downloading file: %s", e.message);
file_transfer.state = FileTransfer.State.FAILED;
}
if (file_transfer.size >= 0 && file_transfer.size < 5000000) {
download_file_internal.begin(file_provider, file_transfer, conversation, (_, res) => {
download_file_internal.end(res);
});
}
}
conversation.last_active = file_transfer.time;
@ -409,10 +311,10 @@ public class FileManager : StreamInteractionModule, Object {
string filename = Random.next_int().to_string("%x") + "_" + file_transfer.file_name;
File file = File.new_for_path(Path.build_filename(get_storage_dir(), filename));
OutputStream os = file.create(FileCreateFlags.REPLACE_DESTINATION);
yield os.splice_async(file_transfer.input_stream, OutputStreamSpliceFlags.CLOSE_SOURCE | OutputStreamSpliceFlags.CLOSE_TARGET);
yield os.splice_async(file_transfer.input_stream, OutputStreamSpliceFlags.CLOSE_SOURCE|OutputStreamSpliceFlags.CLOSE_TARGET);
file_transfer.state = FileTransfer.State.COMPLETE;
file_transfer.path = filename;
file_transfer.input_stream = new LimitInputStream(yield file.read_async(), file_transfer.size);
file_transfer.input_stream = yield file.read_async();
} catch (Error e) {
throw new FileSendError.SAVE_FAILED("Saving file error: %s".printf(e.message));
}
@ -425,10 +327,10 @@ public errordomain FileSendError {
SAVE_FAILED
}
// Get rid of this Error and pass IoErrors instead - DOWNLOAD_FAILED already removed
public errordomain FileReceiveError {
GET_METADATA_FAILED,
DECRYPTION_FAILED
DECRYPTION_FAILED,
DOWNLOAD_FAILED
}
public class FileMeta {
@ -466,7 +368,7 @@ public interface FileProvider : Object {
public abstract FileReceiveData? get_file_receive_data(FileTransfer file_transfer);
public abstract async FileMeta get_meta_info(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws FileReceiveError;
public abstract async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws IOError;
public abstract async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws FileReceiveError;
public abstract int get_id();
}

View File

@ -14,8 +14,6 @@ namespace Dino {
private Database db;
private WeakMap<int, FileTransfer> files_by_db_id = new WeakMap<int, FileTransfer>();
private WeakMap<int, FileTransfer> files_by_message_id = new WeakMap<int, FileTransfer>();
private WeakMap<string, FileTransfer> files_by_message_and_file_id = new WeakMap<string, FileTransfer>();
public static void start(StreamInteractor stream_interactor, Database db) {
FileTransferStorage m = new FileTransferStorage(stream_interactor, db);
@ -43,42 +41,6 @@ namespace Dino {
return create_file_from_row_opt(row_option, conversation);
}
// Http file transfers store the corresponding message id in the `info` field
// Looks up a file transfer by the id of the message that references it
// (stored in the `info` column), serving from the in-memory cache first.
public FileTransfer? get_file_by_message_id(int id, Conversation conversation) {
    FileTransfer? cached = files_by_message_id[id];
    if (cached == null) {
        RowOption row = db.file_transfer.select()
                .with(db.file_transfer.info, "=", id.to_string())
                .single()
                .row();
        cached = create_file_from_row_opt(row, conversation);
    }
    return cached;
}
// Looks up a file transfer by the (referencing message id, XEP-0447 file
// sharing id) pair, serving from the in-memory cache when possible.
// Fix: the return type must be nullable — both lookups can miss and the
// collision check below deliberately returns null, but the previous
// signature declared a non-nullable FileTransfer.
public FileTransfer? get_files_by_message_and_file_id(int message_id, string file_sharing_id, Conversation conversation) {
    string combined_identifier = message_id.to_string() + file_sharing_id;
    FileTransfer? file_transfer = files_by_message_and_file_id[combined_identifier];
    if (file_transfer == null) {
        RowOption row_option = db.file_transfer.select()
                .with(db.file_transfer.info, "=", message_id.to_string())
                .with(db.file_transfer.file_sharing_id, "=", file_sharing_id)
                .single()
                .row();
        file_transfer = create_file_from_row_opt(row_option, conversation);
    }

    // The combined identifier is a plain string concatenation, so distinct
    // (message_id, file_sharing_id) pairs can collide; verify the match.
    if (file_transfer != null && file_transfer.info == message_id.to_string() && file_transfer.file_sharing_id == file_sharing_id) {
        return file_transfer;
    }
    return null;
}
private FileTransfer? create_file_from_row_opt(RowOption row_opt, Conversation conversation) {
if (!row_opt.is_present()) return null;
@ -99,15 +61,6 @@ namespace Dino {
// Registers a FileTransfer in the in-memory lookup caches (by db id, by
// referencing message id, and by combined message/file-sharing id).
// Fix: dropped the redundant `file_transfer.info != null` re-check in the
// inner condition — it is already guaranteed by the enclosing `if`.
private void cache_file(FileTransfer file_transfer) {
    files_by_db_id[file_transfer.id] = file_transfer;

    if (file_transfer.info != null && file_transfer.info != "") {
        // `info` holds the id of the message that references this transfer.
        files_by_message_id[int.parse(file_transfer.info)] = file_transfer;

        if (file_transfer.file_sharing_id != null) {
            // Plain concatenation; collisions are resolved at lookup time.
            string combined_identifier = file_transfer.info + file_transfer.file_sharing_id;
            files_by_message_and_file_id[combined_identifier] = file_transfer;
        }
    }
}
}
}

View File

@ -1,563 +0,0 @@
using Gee;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
using Qlite;
public class Dino.HistorySync {
private StreamInteractor stream_interactor;
private Database db;
public HashMap<Account, HashMap<Jid, int>> current_catchup_id = new HashMap<Account, HashMap<Jid, int>>(Account.hash_func, Account.equals_func);
public WeakMap<Account, XmppStream> sync_streams = new WeakMap<Account, XmppStream>(Account.hash_func, Account.equals_func);
public HashMap<Account, HashMap<Jid, Cancellable>> cancellables = new HashMap<Account, HashMap<Jid, Cancellable>>(Account.hash_func, Account.equals_func);
public HashMap<Account, HashMap<string, DateTime>> mam_times = new HashMap<Account, HashMap<string, DateTime>>();
public HashMap<string, int> hitted_range = new HashMap<string, int>();
// Server ID of the latest message of the previous segment
public HashMap<Account, string> catchup_until_id = new HashMap<Account, string>(Account.hash_func, Account.equals_func);
// Time of the latest message of the previous segment
public HashMap<Account, DateTime> catchup_until_time = new HashMap<Account, DateTime>(Account.hash_func, Account.equals_func);
private HashMap<string, Gee.List<Xmpp.MessageStanza>> stanzas = new HashMap<string, Gee.List<Xmpp.MessageStanza>>();
// Constructor. Wires account hooks so the per-account catchup state is
// reset whenever a stream is (re)negotiated.
// Fix: `public class HistorySync(...)` is not valid Vala — a class
// declaration cannot carry a parameter list; the constructor is spelled
// `public HistorySync(...)`.
public HistorySync(Database db, StreamInteractor stream_interactor) {
    this.stream_interactor = stream_interactor;
    this.db = db;

    stream_interactor.account_added.connect(on_account_added);
    stream_interactor.stream_negotiated.connect((account, stream) => {
        // A fresh stream means previously tracked "current" ranges may be
        // stale; clear them so they are re-established on the next catchup.
        if (current_catchup_id.has_key(account)) {
            debug("MAM: [%s] Reset catchup_id", account.bare_jid.to_string());
            current_catchup_id[account].clear();
        }
    });
}
// Routes an incoming message stanza: MAM results are handed to the history
// machinery (returns true, i.e. consumed here), while live messages extend
// the newest known db range (returns false).
public bool process(Account account, Xmpp.MessageStanza message_stanza) {
    var flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message_stanza);
    if (flag == null) {
        update_latest_db_range(account, message_stanza);
        return false;
    }
    process_mam_message(account, message_stanza, flag);
    return true;
}
// Extends the newest known MAM range of the message's archive with a
// just-received live message, so later catchups know how far "now" reaches.
// No-op if we are not currently tracking a range for that archive or the
// message carries no stable stanza id.
public void update_latest_db_range(Account account, Xmpp.MessageStanza message_stanza) {
    Jid mam_server;
    if (stream_interactor.get_module(MucManager.IDENTITY).might_be_groupchat(message_stanza.from.bare_jid, account)) {
        mam_server = message_stanza.from.bare_jid;
    } else {
        mam_server = account.bare_jid;
    }

    if (!current_catchup_id.has_key(account) || !current_catchup_id[account].has_key(mam_server)) return;

    string? stanza_id = UniqueStableStanzaIDs.get_stanza_id(message_stanza, mam_server);
    if (stanza_id == null) return;

    db.mam_catchup.update()
            .with(db.mam_catchup.id, "=", current_catchup_id[account][mam_server])
            .set(db.mam_catchup.to_time, (long)new DateTime.now_utc().to_unix())
            .set(db.mam_catchup.to_id, stanza_id)
            .perform();
}
// Buffers a MAM result stanza under its query id, after validating that the
// sending archive is one we could legitimately have queried: our own
// account's server, or — for groupchats — the MUC itself.
public void process_mam_message(Account account, Xmpp.MessageStanza message_stanza, Xmpp.MessageArchiveManagement.MessageFlag mam_flag) {
    Jid mam_server = mam_flag.sender_jid;
    Jid message_author = message_stanza.from;

    // MUC servers may only send MAM messages from that MUC
    bool legit_muc_source = stream_interactor.get_module(MucManager.IDENTITY).might_be_groupchat(mam_server, account) &&
            message_author.equals_bare(mam_server);
    bool legit_own_source = mam_server.equals_bare(account.bare_jid);
    if (!(legit_muc_source || legit_own_source)) {
        warning("Received alleged MAM message from %s, ignoring", mam_server.to_string());
        return;
    }

    if (!stanzas.has_key(mam_flag.query_id)) {
        stanzas[mam_flag.query_id] = new ArrayList<Xmpp.MessageStanza>();
    }
    stanzas[mam_flag.query_id].add(message_stanza);
}
// Inspects raw (unprocessed) stanzas for MAM result wrappers: records each
// result's server-side timestamp in `mam_times` (needed later to convert
// MAM ids into range boundaries) and flags the query as "target reached"
// when the result id matches the end of the previous catchup segment.
private void on_unprocessed_message(Account account, XmppStream stream, MessageStanza message) {
    // Check that it's a legit MAM server
    bool is_muc_mam = stream_interactor.get_module(MucManager.IDENTITY).might_be_groupchat(message.from, account);
    bool from_our_server = message.from.equals_bare(account.bare_jid);
    if (!is_muc_mam && !from_our_server) return;

    // Get the server time of the message and store it in `mam_times`
    string? id = message.stanza.get_deep_attribute(Xmpp.MessageArchiveManagement.NS_URI + ":result", "id");
    if (id == null) return;
    // The archive's timestamp lives in the forwarded <delay/> element.
    StanzaNode? delay_node = message.stanza.get_deep_subnode(Xmpp.MessageArchiveManagement.NS_URI + ":result", StanzaForwarding.NS_URI + ":forwarded", DelayedDelivery.NS_URI + ":delay");
    if (delay_node == null) {
        warning("MAM result did not contain delayed time %s", message.stanza.to_string());
        return;
    }
    DateTime? time = DelayedDelivery.get_time_for_node(delay_node);
    if (time == null) return;
    // NOTE(review): assumes mam_times[account] was initialized elsewhere
    // (presumably in on_account_added) — confirm, else this throws.
    mam_times[account][id] = time;

    // Check if this is the target message
    string? query_id = message.stanza.get_deep_attribute(Xmpp.MessageArchiveManagement.NS_URI + ":result", Xmpp.MessageArchiveManagement.NS_URI + ":queryid");
    if (query_id != null && id == catchup_until_id[account]) {
        debug("[%s] Hitted range (id) %s", account.bare_jid.to_string(), id);
        // -2 marks "target id reached" for this query (see paging logic).
        hitted_range[query_id] = -2;
    }
}
// Called when an incoming MAM message was deduplicated by its server id.
// If the duplicate's server timestamp lies before the end of the previous
// catchup segment, we have paged into already-known history: mark the query
// as in range (-1) so fetching can stop.
public void on_server_id_duplicate(Account account, Xmpp.MessageStanza message_stanza, Entities.Message message) {
    var mam_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message_stanza);
    if (mam_flag == null) return;
    if (!catchup_until_time.has_key(account)) return;

    if (mam_flag.server_time.compare(catchup_until_time[account]) < 0) {
        hitted_range[mam_flag.query_id] = -1;
    }
}
// Catches up the full history of `mam_server` (back to `until_earliest_time`):
// 1. fetches the newest page(s), extending or creating the latest db range,
// 2. repeatedly fetches the gaps between adjacent stored ranges, merging them,
// 3. finally tries to extend the earliest range further into the past.
// NOTE(review): `until_earliest_time` is declared non-nullable (with a
// default of epoch 0) yet is null-checked below — confirm whether callers
// can actually pass null here.
public async void fetch_everything(Account account, Jid mam_server, Cancellable? cancellable = null, DateTime until_earliest_time = new DateTime.from_unix_utc(0)) {
    debug("Fetch everything for %s %s", mam_server.to_string(), until_earliest_time != null ? @"(until $until_earliest_time)" : "");
    // Newest stored range that still overlaps the requested window.
    RowOption latest_row_opt = db.mam_catchup.select()
            .with(db.mam_catchup.account_id, "=", account.id)
            .with(db.mam_catchup.server_jid, "=", mam_server.to_string())
            .with(db.mam_catchup.to_time, ">=", (long) until_earliest_time.to_unix())
            .order_by(db.mam_catchup.to_time, "DESC")
            .single().row();

    Row? latest_row = latest_row_opt.is_present() ? latest_row_opt.inner : null;

    // Fetch from "now" back to the latest stored range (or create a new range).
    Row? new_row = yield fetch_latest_page(account, mam_server, latest_row, until_earliest_time, cancellable);

    // Track which range live messages should extend from now on.
    if (new_row != null) {
        current_catchup_id[account][mam_server] = new_row[db.mam_catchup.id];
    } else if (latest_row != null) {
        current_catchup_id[account][mam_server] = latest_row[db.mam_catchup.id];
    }

    // Set the previous and current row
    Row? previous_row = null;
    Row? current_row = null;
    if (new_row != null) {
        current_row = new_row;
        previous_row = latest_row;
    } else if (latest_row != null) {
        current_row = latest_row;
        // Next-older stored range (still within the requested window).
        RowOption previous_row_opt = db.mam_catchup.select()
                .with(db.mam_catchup.account_id, "=", account.id)
                .with(db.mam_catchup.server_jid, "=", mam_server.to_string())
                .with(db.mam_catchup.to_time, "<", current_row[db.mam_catchup.from_time])
                .with(db.mam_catchup.to_time, ">=", (long) until_earliest_time.to_unix())
                .order_by(db.mam_catchup.to_time, "DESC")
                .single().row();
        previous_row = previous_row_opt.is_present() ? previous_row_opt.inner : null;
    }

    // Fetch messages between two db ranges and merge them
    while (current_row != null && previous_row != null) {
        // from_end means the range already reaches the start of the archive.
        if (current_row[db.mam_catchup.from_end]) return;

        debug("[%s] Fetching between ranges %s - %s", mam_server.to_string(), previous_row[db.mam_catchup.to_time].to_string(), current_row[db.mam_catchup.from_time].to_string());
        current_row = yield fetch_between_ranges(account, mam_server, previous_row, current_row, cancellable);
        if (current_row == null) return;

        RowOption previous_row_opt = db.mam_catchup.select()
                .with(db.mam_catchup.account_id, "=", account.id)
                .with(db.mam_catchup.server_jid, "=", mam_server.to_string())
                .with(db.mam_catchup.to_time, "<", current_row[db.mam_catchup.from_time])
                .with(db.mam_catchup.to_time, ">=", (long) until_earliest_time.to_unix())
                .order_by(db.mam_catchup.to_time, "DESC")
                .single().row();
        previous_row = previous_row_opt.is_present() ? previous_row_opt.inner : null;
    }

    // We're at the earliest range. Try to expand it even further back.
    if (current_row == null || current_row[db.mam_catchup.from_end]) return;

    // We don't want to fetch before the earliest range over and over again in MUCs if it's after until_earliest_time.
    // For now, don't query if we are within a week of until_earliest_time
    if (until_earliest_time != null &&
            current_row[db.mam_catchup.from_time] > until_earliest_time.add(-TimeSpan.DAY * 7).to_unix()) return;

    yield fetch_before_range(account, mam_server, current_row, until_earliest_time);
}
// Fetches the latest page (up to previous db row). Extends the previous db row if it was reached, creates a new row otherwise.
// Fetches the newest MAM page(s) for `mam_server`.
// Returns:
//  - a new db Row when a fresh range had to be created (more pages pending,
//    or this is the first range ever stored for the archive),
//  - null when the catchup finished within the first page (the existing row
//    was extended in place), on error, or on cancellation.
public async Row? fetch_latest_page(Account account, Jid mam_server, Row? latest_row, DateTime? until_earliest_time, Cancellable? cancellable = null) {
    debug("[%s | %s] Fetching latest page", account.bare_jid.to_string(), mam_server.to_string());

    int latest_row_id = -1;
    DateTime latest_message_time = until_earliest_time;
    string? latest_message_id = null;

    if (latest_row != null) {
        latest_row_id = latest_row[db.mam_catchup.id];
        // Overlap by 5 minutes so the duplicate detection can link the new
        // page to the existing range.
        latest_message_time = (new DateTime.from_unix_utc(latest_row[db.mam_catchup.to_time])).add_minutes(-5);
        latest_message_id = latest_row[db.mam_catchup.to_id];

        // Make sure we only fetch to until_earliest_time if latest_message_time is further back
        if (until_earliest_time != null && latest_message_time.compare(until_earliest_time) < 0) {
            latest_message_time = until_earliest_time.add_minutes(-5);
            latest_message_id = null;
        }
    }

    var query_params = new Xmpp.MessageArchiveManagement.V2.MamQueryParams.query_latest(mam_server, latest_message_time, latest_message_id);

    PageRequestResult page_result = yield get_mam_page(account, query_params, null, cancellable);
    debug("[%s | %s] Latest page result: %s", account.bare_jid.to_string(), mam_server.to_string(), page_result.page_result.to_string());

    if (page_result.page_result == PageResult.Error || page_result.page_result == PageResult.Cancelled) {
        return null;
    }

    // Catchup finished within first page. Update latest db entry.
    if (latest_row_id != -1 &&
            page_result.page_result in new PageResult[] { PageResult.TargetReached, PageResult.NoMoreMessages }) {
        if (page_result.stanzas == null) return null;

        string latest_mam_id = page_result.query_result.last;
        long latest_mam_time = (long) mam_times[account][latest_mam_id].to_unix();

        var query = db.mam_catchup.update()
                .with(db.mam_catchup.id, "=", latest_row_id)
                .set(db.mam_catchup.to_time, latest_mam_time)
                .set(db.mam_catchup.to_id, latest_mam_id);

        if (page_result.page_result == PageResult.NoMoreMessages) {
            // If the server doesn't have more messages, store that this range is at its end.
            query.set(db.mam_catchup.from_end, true);
        }
        query.perform();

        return null;
    }

    if (page_result.query_result.first == null || page_result.query_result.last == null) {
        return null;
    }

    // Either we need to fetch more pages or this is the first db entry ever
    debug("[%s | %s] Creating new db range for latest page", account.bare_jid.to_string(), mam_server.to_string());

    string from_id = page_result.query_result.first;
    string to_id = page_result.query_result.last;

    // Both boundary timestamps must have been recorded by
    // on_unprocessed_message while the page was received.
    if (!mam_times[account].has_key(from_id) || !mam_times[account].has_key(to_id)) {
        debug("Missing from/to id %s %s", from_id, to_id);
        return null;
    }

    long from_time = (long) mam_times[account][from_id].to_unix();
    long to_time = (long) mam_times[account][to_id].to_unix();

    int new_row_id = (int) db.mam_catchup.insert()
            .value(db.mam_catchup.account_id, account.id)
            .value(db.mam_catchup.server_jid, mam_server.to_string())
            .value(db.mam_catchup.from_id, from_id)
            .value(db.mam_catchup.from_time, from_time)
            .value(db.mam_catchup.from_end, page_result.page_result == PageResult.NoMoreMessages)
            .value(db.mam_catchup.to_id, to_id)
            .value(db.mam_catchup.to_time, to_time)
            .perform();
    return db.mam_catchup.select().with(db.mam_catchup.id, "=", new_row_id).single().row().inner;
}
/** Fetches messages between the end of `earlier_range` and start of `later_range`
** Merges the `earlier_range` db row into the `later_range` db row.
** @return The resulting range comprising `earlier_range`, `later_rage`, and everything in between. null if fetching/merge failed.
**/
private async Row? fetch_between_ranges(Account account, Jid mam_server, Row earlier_range, Row later_range, Cancellable? cancellable = null) {
    int later_range_id = (int) later_range[db.mam_catchup.id];
    // Gap boundaries: end of the earlier range up to start of the later one.
    DateTime earliest_time = new DateTime.from_unix_utc(earlier_range[db.mam_catchup.to_time]);
    DateTime latest_time = new DateTime.from_unix_utc(later_range[db.mam_catchup.from_time]);
    debug("[%s | %s] Fetching between %s (%s) and %s (%s)", account.bare_jid.to_string(), mam_server.to_string(), earliest_time.to_string(), earlier_range[db.mam_catchup.to_id], latest_time.to_string(), later_range[db.mam_catchup.from_id]);

    var query_params = new Xmpp.MessageArchiveManagement.V2.MamQueryParams.query_between(mam_server,
            earliest_time, earlier_range[db.mam_catchup.to_id],
            latest_time, later_range[db.mam_catchup.from_id]);

    PageRequestResult page_result = yield fetch_query(account, query_params, later_range_id, cancellable);

    // Only merge if the gap was fully covered; otherwise leave both rows as-is.
    if (page_result.page_result == PageResult.TargetReached || page_result.page_result == PageResult.NoMoreMessages) {
        debug("[%s | %s] Merging range %i into %i", account.bare_jid.to_string(), mam_server.to_string(), earlier_range[db.mam_catchup.id], later_range_id);
        // Merge earlier range into later one.
        db.mam_catchup.update()
                .with(db.mam_catchup.id, "=", later_range_id)
                .set(db.mam_catchup.from_time, earlier_range[db.mam_catchup.from_time])
                .set(db.mam_catchup.from_id, earlier_range[db.mam_catchup.from_id])
                .set(db.mam_catchup.from_end, earlier_range[db.mam_catchup.from_end])
                .perform();

        db.mam_catchup.delete().with(db.mam_catchup.id, "=", earlier_range[db.mam_catchup.id]).perform();

        // Return the updated version of the later range
        return db.mam_catchup.select().with(db.mam_catchup.id, "=", later_range_id).single().row().inner;
    }
    // Fetch was incomplete (error/cancel); caller treats null as "stop".
    return null;
}
/**
 * Fetches messages older than the given db range, i.e. everything before the range's from_* bound.
 * If `until_earliest_time` is set, fetching stops once that point in time is reached;
 * otherwise it pages back through the full remaining history.
 **/
private async void fetch_before_range(Account account, Jid mam_server, Row range, DateTime? until_earliest_time, Cancellable? cancellable = null) {
    DateTime range_start_time = new DateTime.from_unix_utc(range[db.mam_catchup.from_time]);
    string range_start_id = range[db.mam_catchup.from_id];
    debug("[%s | %s] Fetching before range < %s, %s", account.bare_jid.to_string(), mam_server.to_string(), range_start_time.to_string(), range_start_id);

    // Open-ended query when no lower time bound was requested, bounded query otherwise.
    var query_params = until_earliest_time == null
        ? new Xmpp.MessageArchiveManagement.V2.MamQueryParams.query_before(mam_server, range_start_time, range_start_id)
        : new Xmpp.MessageArchiveManagement.V2.MamQueryParams.query_between(
            mam_server,
            until_earliest_time, null,
            range_start_time, range_start_id
        );

    yield fetch_query(account, query_params, range[db.mam_catchup.id], cancellable);
}
/**
 * Iteratively fetches all pages returned for a query (until a PageResult other than MorePagesAvailable is returned)
 * After each page, the db range identified by `db_id` is extended downwards (from_time/from_id)
 * to the earliest message seen so far, so an interrupted sync can resume where it stopped.
 * @param db_id id of the mam_catchup row to update while paging
 * @return The last PageRequestResult result
 **/
private async PageRequestResult fetch_query(Account account, Xmpp.MessageArchiveManagement.V2.MamQueryParams query_params, int db_id, Cancellable? cancellable = null) {
    debug("[%s | %s] Fetch query %s - %s", account.bare_jid.to_string(), query_params.mam_server.to_string(), query_params.start != null ? query_params.start.to_string() : "", query_params.end != null ? query_params.end.to_string() : "");
    PageRequestResult? page_result = null;
    do {
        page_result = yield get_mam_page(account, query_params, page_result, cancellable);
        debug("[%s | %s] Page result %s (got stanzas: %s)", account.bare_jid.to_string(), query_params.mam_server.to_string(), page_result.page_result.to_string(), (page_result.stanzas != null).to_string());

        // Stop on failure/cancellation, or when the page reported no first message at all.
        if (page_result.page_result == PageResult.Error || page_result.page_result == PageResult.Cancelled || page_result.query_result.first == null) return page_result;

        // `first` is the RSM id of the earliest message on this page; record it as the new lower bound.
        string earliest_mam_id = page_result.query_result.first;
        // NOTE(review): assumes mam_times[account] was filled for earliest_mam_id while the page's
        // stanzas were processed — verify against on_unprocessed_message.
        long earliest_mam_time = (long)mam_times[account][earliest_mam_id].to_unix();

        debug("Updating %s to %s, %s", query_params.mam_server.to_string(), earliest_mam_time.to_string(), earliest_mam_id);
        var query = db.mam_catchup.update()
            .with(db.mam_catchup.id, "=", db_id)
            .set(db.mam_catchup.from_time, earliest_mam_time)
            .set(db.mam_catchup.from_id, earliest_mam_id);
        if (page_result.page_result == PageResult.NoMoreMessages) {
            // If the server doesn't have more messages, store that this range is at its end.
            query.set(db.mam_catchup.from_end, true);
        }
        query.perform();
    } while (page_result.page_result == PageResult.MorePagesAvailable);

    return page_result;
}
// Outcome of fetching a single MAM page.
enum PageResult {
    MorePagesAvailable, // Page processed; the server has further pages for this query
    TargetReached,      // The requested target message (from_id) was found on this page
    NoMoreMessages,     // The server reported the query as complete
    Error,              // The query result was malformed or an error
    Cancelled           // The supplied Cancellable was triggered
}
/**
 * Requests a single MAM page: the initial archive query for the first page,
 * or a continuation (RSM paging) of a previous result otherwise.
 * prev_page_result: null if this is the first page request
 **/
private async PageRequestResult get_mam_page(Account account, Xmpp.MessageArchiveManagement.V2.MamQueryParams query_params, PageRequestResult? prev_page_result, Cancellable? cancellable = null) {
    XmppStream current_stream = stream_interactor.get_stream(account);

    Xmpp.MessageArchiveManagement.QueryResult query_result;
    if (prev_page_result != null) {
        // Continue paging through an already-running query.
        query_result = yield Xmpp.MessageArchiveManagement.V2.page_through_results(current_stream, query_params, prev_page_result.query_result, cancellable);
    } else {
        // First page: start a fresh archive query.
        query_result = yield Xmpp.MessageArchiveManagement.V2.query_archive(current_stream, query_params, cancellable);
    }
    return yield process_query_result(account, query_params, query_result, cancellable);
}
/**
 * Evaluates the result of a single MAM page: waits until all of the page's stanzas have been
 * processed (filling `mam_times`), classifies the outcome as a PageResult, and flushes the
 * queued stanzas back into the message pipeline.
 *
 * Fix: the `cancellable` is now propagated to every send_messages_back_into_pipeline() call;
 * previously only the first of the three calls passed it, so a cancelled sync still pushed
 * the remaining stanzas through the pipeline.
 *
 * @return A PageRequestResult bundling the outcome, the raw query result and the page's stanzas
 **/
private async PageRequestResult process_query_result(Account account, Xmpp.MessageArchiveManagement.V2.MamQueryParams query_params, Xmpp.MessageArchiveManagement.QueryResult query_result, Cancellable? cancellable = null) {
    PageResult page_result = PageResult.MorePagesAvailable;

    if (query_result.malformed || query_result.error) {
        page_result = PageResult.Error;
    }

    // We wait until all the messages from the page are processed (and we got the `mam_times` from them)
    Idle.add(process_query_result.callback, Priority.LOW);
    yield;

    // We might have successfully reached the target or the server doesn't have all messages stored anymore.
    // If it's the former, we return PageResult.TargetReached below instead.
    if (query_result.complete) {
        page_result = PageResult.NoMoreMessages;
    }

    string query_id = query_params.query_id;
    string? after_id = query_params.start_id;

    var stanzas_for_query = stanzas.has_key(query_id) && !stanzas[query_id].is_empty ? stanzas[query_id] : null;

    if (cancellable != null && cancellable.is_cancelled()) {
        stanzas.unset(query_id);
        return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
    }

    if (stanzas_for_query != null) {
        // Check if we reached our target (from_id)
        foreach (Xmpp.MessageStanza message in stanzas_for_query) {
            Xmpp.MessageArchiveManagement.MessageFlag? mam_message_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message);
            if (mam_message_flag != null && mam_message_flag.mam_id != null) {
                if (after_id != null && mam_message_flag.mam_id == after_id) {
                    // Successfully fetched the whole range
                    yield send_messages_back_into_pipeline(account, query_id, cancellable);
                    if (cancellable != null && cancellable.is_cancelled()) {
                        return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
                    }
                    return new PageRequestResult(PageResult.TargetReached, query_result, stanzas_for_query);
                }
            }
        }
        if (hitted_range.has_key(query_id) && hitted_range[query_id] == -2) {
            // Message got filtered out by xmpp-vala, but successful range fetch nevertheless
            yield send_messages_back_into_pipeline(account, query_id, cancellable);
            if (cancellable != null && cancellable.is_cancelled()) {
                return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
            }
            return new PageRequestResult(PageResult.TargetReached, query_result, stanzas_for_query);
        }
    }
    yield send_messages_back_into_pipeline(account, query_id, cancellable);
    if (cancellable != null && cancellable.is_cancelled()) {
        page_result = PageResult.Cancelled;
    }
    return new PageRequestResult(page_result, query_result, stanzas_for_query);
}
// Re-announces all stanzas queued under `query_id` through the regular message pipeline,
// stopping early if `cancellable` fires. The queue entry is removed afterwards either way.
private async void send_messages_back_into_pipeline(Account account, string query_id, Cancellable? cancellable = null) {
    if (!stanzas.has_key(query_id)) return;

    var queued_stanzas = stanzas[query_id];
    foreach (Xmpp.MessageStanza message in queued_stanzas) {
        if (cancellable != null && cancellable.is_cancelled()) break;
        yield stream_interactor.get_module(MessageProcessor.IDENTITY).run_pipeline_announce(account, message);
    }
    stanzas.unset(query_id);
}
// Wires up per-account state and signal handlers when an account is added.
private void on_account_added(Account account) {
    // Drop redundant/overlapping catchup ranges left over from previous runs.
    cleanup_db_ranges(db, account);

    mam_times[account] = new HashMap<string, DateTime>();

    // On every (re)attached stream, reset the per-server catchup ids for this account.
    stream_interactor.connection_manager.stream_attached_modules.connect((account, stream) => {
        if (!current_catchup_id.has_key(account)) {
            current_catchup_id[account] = new HashMap<Jid, int>(Jid.hash_func, Jid.equals_func);
        } else {
            current_catchup_id[account].clear();
        }
    });
    // Start syncing once the server announces MAM support.
    stream_interactor.module_manager.get_module(account, Xmpp.MessageArchiveManagement.Module.IDENTITY).feature_available.connect((stream) => {
        consider_fetch_everything(account, stream);
    });
    // Track incoming raw messages (used to collect MAM ids/times before pipeline processing).
    stream_interactor.module_manager.get_module(account, Xmpp.MessageModule.IDENTITY).received_message_unprocessed.connect((stream, message) => {
        on_unprocessed_message(account, stream, message);
    });
}
// Kicks off a full history sync for the account's own archive on a newly MAM-capable stream.
// Does nothing if this stream was already handled; cancels a sync still running from a
// previous stream before starting the new one.
private void consider_fetch_everything(Account account, XmppStream stream) {
    if (sync_streams.has(account, stream)) return;

    debug("[%s] MAM available", account.bare_jid.to_string());
    sync_streams[account] = stream;

    if (!cancellables.has_key(account)) {
        cancellables[account] = new HashMap<Jid, Cancellable>();
    }
    var account_cancellables = cancellables[account];

    // A sync from an earlier stream may still be running — stop it first.
    if (account_cancellables.has_key(account.bare_jid)) {
        account_cancellables[account.bare_jid].cancel();
    }

    var sync_cancellable = new Cancellable();
    account_cancellables[account.bare_jid] = sync_cancellable;
    fetch_everything.begin(account, account.bare_jid, sync_cancellable, new DateTime.from_unix_utc(0), (_, res) => {
        fetch_everything.end(res);
        account_cancellables.unset(account.bare_jid);
    });
}
/**
 * Removes redundant MAM catchup ranges for `account` from the database:
 *  - a range fully contained in another range of the same server is deleted
 *  - a range extending another towards earlier times is merged into it and then deleted
 **/
public static void cleanup_db_ranges(Database db, Account account) {
    var ranges = new HashMap<Jid, ArrayList<MamRange>>(Jid.hash_func, Jid.equals_func);

    // Load all catchup ranges of this account, grouped by archive server.
    foreach (Row row in db.mam_catchup.select().with(db.mam_catchup.account_id, "=", account.id)) {
        var mam_range = new MamRange();
        mam_range.id = row[db.mam_catchup.id];
        mam_range.server_jid = new Jid(row[db.mam_catchup.server_jid]);
        mam_range.from_time = row[db.mam_catchup.from_time];
        mam_range.from_id = row[db.mam_catchup.from_id];
        mam_range.from_end = row[db.mam_catchup.from_end];
        mam_range.to_time = row[db.mam_catchup.to_time];
        mam_range.to_id = row[db.mam_catchup.to_id];

        if (!ranges.has_key(mam_range.server_jid)) ranges[mam_range.server_jid] = new ArrayList<MamRange>();
        ranges[mam_range.server_jid].add(mam_range);
    }

    var to_delete = new ArrayList<MamRange>();
    foreach (Jid server_jid in ranges.keys) {
        foreach (var range1 in ranges[server_jid]) {
            if (to_delete.contains(range1)) continue;

            foreach (MamRange range2 in ranges[server_jid]) {
                debug("[%s | %s] | %s - %s vs %s - %s", account.bare_jid.to_string(), server_jid.to_string(), range1.from_time.to_string(), range1.to_time.to_string(), range2.from_time.to_string(), range2.to_time.to_string());
                if (range1 == range2 || to_delete.contains(range2)) continue;

                // Check if range2 is a subset of range1
                // range1: #####################
                // range2:      ######
                if (range1.from_time <= range2.from_time && range1.to_time >= range2.to_time) {
                    warning("Removing db range which is a subset of %li-%li", range1.from_time, range1.to_time);
                    to_delete.add(range2);
                    continue;
                }

                // Check if range2 is an extension of range1 (towards earlier)
                // range1:      #####################
                // range2: ###############
                // BUGFIX: was the chained comparison `range1.from_time <= range2.to_time <= range1.to_time`,
                // which evaluates as `(bool) <= long` instead of range-checking range2.to_time.
                if (range1.from_time <= range2.to_time && range2.to_time <= range1.to_time && range2.from_time <= range1.from_time) {
                    warning("Removing db range that overlapped %li-%li (towards earlier)", range1.from_time, range1.to_time);
                    // Extend range1 downwards to range2's lower bound, then drop range2.
                    db.mam_catchup.update()
                        .with(db.mam_catchup.id, "=", range1.id)
                        .set(db.mam_catchup.from_id, range2.from_id)
                        .set(db.mam_catchup.from_time, range2.from_time)
                        .set(db.mam_catchup.from_end, range2.from_end)
                        .perform();
                    to_delete.add(range2);
                    continue;
                }
            }
        }
    }

    foreach (MamRange row in to_delete) {
        db.mam_catchup.delete().with(db.mam_catchup.id, "=", row.id).perform();
        warning("Removing db range %s %li-%li", row.server_jid.to_string(), row.from_time, row.to_time);
    }
}
// In-memory copy of one row of the mam_catchup table, used by cleanup_db_ranges().
class MamRange {
    public int id;          // mam_catchup row id
    public Jid server_jid;  // archive server this range belongs to
    public long from_time;  // unix time of the earliest message in the range
    public string from_id;  // MAM stanza id of the earliest message
    public bool from_end;   // true if the server has no messages before from_id
    public long to_time;    // unix time of the latest message in the range
    public string to_id;    // MAM stanza id of the latest message
}
// Bundles everything resulting from one MAM page request: the classified outcome,
// the raw query result, and the stanzas collected for the page (null if none).
class PageRequestResult {
    public Gee.List<MessageStanza> stanzas { get; set; }
    public PageResult page_result { get; set; }
    public Xmpp.MessageArchiveManagement.QueryResult query_result { get; set; }

    public PageRequestResult(PageResult page_result, Xmpp.MessageArchiveManagement.QueryResult query_result, Gee.List<MessageStanza>? stanzas) {
        this.page_result = page_result;
        this.query_result = query_result;
        this.stanzas = stanzas;
    }
}
}

View File

@ -64,7 +64,7 @@ public class JingleFileProvider : FileProvider, Object {
public JingleFileProvider(StreamInteractor stream_interactor) {
this.stream_interactor = stream_interactor;
stream_interactor.account_added.connect(on_account_added);
stream_interactor.stream_negotiated.connect(on_stream_negotiated);
}
public FileMeta get_file_meta(FileTransfer file_transfer) throws FileReceiveError {
@ -95,27 +95,34 @@ public class JingleFileProvider : FileProvider, Object {
return Encryption.NONE;
}
public async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws IOError {
public async InputStream download(FileTransfer file_transfer, FileReceiveData receive_data, FileMeta file_meta) throws FileReceiveError {
// TODO(hrxi) What should happen if `stream == null`?
XmppStream? stream = stream_interactor.get_stream(file_transfer.account);
Xmpp.Xep.JingleFileTransfer.FileTransfer? jingle_file_transfer = file_transfers[file_transfer.info];
if (jingle_file_transfer == null) {
throw new IOError.FAILED("Transfer data not available anymore");
throw new FileReceiveError.DOWNLOAD_FAILED("Transfer data not available anymore");
}
yield jingle_file_transfer.accept(stream);
return new LimitInputStream(jingle_file_transfer.stream, file_meta.size);
try {
yield jingle_file_transfer.accept(stream);
} catch (IOError e) {
throw new FileReceiveError.DOWNLOAD_FAILED("Establishing connection did not work");
}
return jingle_file_transfer.stream;
}
public int get_id() {
return 1;
}
private void on_account_added(Account account) {
private void on_stream_negotiated(Account account, XmppStream stream) {
stream_interactor.module_manager.get_module(account, Xmpp.Xep.JingleFileTransfer.Module.IDENTITY).file_incoming.connect((stream, jingle_file_transfer) => {
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(jingle_file_transfer.peer.bare_jid, account);
if (conversation == null) return;
if (conversation == null) {
// TODO(hrxi): What to do?
return;
}
string id = random_uuid();
file_transfers[id] = jingle_file_transfer;
FileMeta file_meta = new FileMeta();

View File

@ -39,21 +39,26 @@ public class MessageCorrection : StreamInteractionModule, MessageListener {
});
}
public void set_correction(Conversation conversation, Message message, Message old_message) {
string reference_stanza_id = old_message.edit_to ?? old_message.stanza_id;
public void send_correction(Conversation conversation, Message old_message, string correction_text) {
string stanza_id = old_message.edit_to ?? old_message.stanza_id;
outstanding_correction_nodes[message.stanza_id] = reference_stanza_id;
Message out_message = stream_interactor.get_module(MessageProcessor.IDENTITY).create_out_message(correction_text, conversation);
out_message.edit_to = stanza_id;
outstanding_correction_nodes[out_message.stanza_id] = stanza_id;
stream_interactor.get_module(MessageProcessor.IDENTITY).send_xmpp_message(out_message, conversation);
db.message_correction.insert()
.value(db.message_correction.message_id, message.id)
.value(db.message_correction.to_stanza_id, reference_stanza_id)
.perform();
.value(db.message_correction.message_id, out_message.id)
.value(db.message_correction.to_stanza_id, stanza_id)
.perform();
db.content_item.update()
.with(db.content_item.foreign_id, "=", old_message.id)
.with(db.content_item.content_type, "=", 1)
.set(db.content_item.foreign_id, message.id)
.perform();
.with(db.content_item.foreign_id, "=", old_message.id)
.with(db.content_item.content_type, "=", 1)
.set(db.content_item.foreign_id, out_message.id)
.perform();
on_received_correction(conversation, out_message.id);
}
public bool is_own_correction_allowed(Conversation conversation, Message message) {
@ -91,10 +96,9 @@ public class MessageCorrection : StreamInteractionModule, MessageListener {
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
if (conversation.type_ != Conversation.Type.CHAT) {
// Don't process messages or corrections from MUC history or MUC MAM
// Don't process messages or corrections from MUC history
DateTime? mam_delay = Xep.DelayedDelivery.get_time_for_message(stanza, message.from.bare_jid);
if (mam_delay != null) return false;
if (Xmpp.MessageArchiveManagement.MessageFlag.get_flag(stanza) != null) return false;
}
string? replace_id = Xep.LastMessageCorrection.get_replace_id(stanza);
@ -139,8 +143,8 @@ public class MessageCorrection : StreamInteractionModule, MessageListener {
return false;
}
public void on_received_correction(Conversation conversation, int message_id) {
ContentItem? content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_foreign(conversation, 1, message_id);
private void on_received_correction(Conversation conversation, int message_id) {
ContentItem? content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item(conversation, 1, message_id);
if (content_item != null) {
received_correction(content_item);
}

View File

@ -18,11 +18,15 @@ public class MessageProcessor : StreamInteractionModule, Object {
public signal void message_sent_or_received(Entities.Message message, Conversation conversation);
public signal void history_synced(Account account);
public HistorySync history_sync;
public MessageListenerHolder received_pipeline = new MessageListenerHolder();
private StreamInteractor stream_interactor;
private Database db;
private HashMap<Account, int> current_catchup_id = new HashMap<Account, int>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashMap<string, DateTime>> mam_times = new HashMap<Account, HashMap<string, DateTime>>();
public HashMap<string, int> hitted_range = new HashMap<string, int>();
public HashMap<Account, string> catchup_until_id = new HashMap<Account, string>(Account.hash_func, Account.equals_func);
public HashMap<Account, DateTime> catchup_until_time = new HashMap<Account, DateTime>(Account.hash_func, Account.equals_func);
public static void start(StreamInteractor stream_interactor, Database db) {
MessageProcessor m = new MessageProcessor(stream_interactor, db);
@ -32,18 +36,34 @@ public class MessageProcessor : StreamInteractionModule, Object {
private MessageProcessor(StreamInteractor stream_interactor, Database db) {
this.stream_interactor = stream_interactor;
this.db = db;
this.history_sync = new HistorySync(db, stream_interactor);
received_pipeline.connect(new DeduplicateMessageListener(this));
received_pipeline.connect(new DeduplicateMessageListener(this, db));
received_pipeline.connect(new FilterMessageListener());
received_pipeline.connect(new StoreMessageListener(this, stream_interactor));
received_pipeline.connect(new StoreMessageListener(stream_interactor));
received_pipeline.connect(new StoreContentItemListener(stream_interactor));
received_pipeline.connect(new MarkupListener(stream_interactor));
received_pipeline.connect(new MamMessageListener(stream_interactor));
stream_interactor.account_added.connect(on_account_added);
stream_interactor.stream_negotiated.connect(send_unsent_chat_messages);
stream_interactor.stream_resumed.connect(send_unsent_chat_messages);
stream_interactor.connection_manager.stream_opened.connect((account, stream) => {
debug("MAM: [%s] Reset catchup_id", account.bare_jid.to_string());
current_catchup_id.unset(account);
});
}
public Entities.Message send_text(string text, Conversation conversation) {
Entities.Message message = create_out_message(text, conversation);
return send_message(message, conversation);
}
public Entities.Message send_message(Entities.Message message, Conversation conversation) {
stream_interactor.get_module(ContentItemStore.IDENTITY).insert_message(message, conversation);
send_xmpp_message(message, conversation);
message_sent(message, conversation);
return message;
}
private void convert_sending_to_unsent_msgs(Account account) {
@ -86,10 +106,43 @@ public class MessageProcessor : StreamInteractionModule, Object {
}
private void on_account_added(Account account) {
mam_times[account] = new HashMap<string, DateTime>();
stream_interactor.module_manager.get_module(account, Xmpp.MessageModule.IDENTITY).received_message.connect( (stream, message) => {
on_message_received.begin(account, message);
});
XmppStream? stream_bak = null;
stream_interactor.module_manager.get_module(account, Xmpp.Xep.MessageArchiveManagement.Module.IDENTITY).feature_available.connect( (stream) => {
if (stream == stream_bak) return;
current_catchup_id.unset(account);
stream_bak = stream;
debug("MAM: [%s] MAM available", account.bare_jid.to_string());
do_mam_catchup.begin(account);
});
stream_interactor.module_manager.get_module(account, Xmpp.MessageModule.IDENTITY).received_message_unprocessed.connect((stream, message) => {
if (!message.from.equals(account.bare_jid)) return;
Xep.MessageArchiveManagement.Flag? mam_flag = stream != null ? stream.get_flag(Xep.MessageArchiveManagement.Flag.IDENTITY) : null;
if (mam_flag == null) return;
string? id = message.stanza.get_deep_attribute(mam_flag.ns_ver + ":result", "id");
if (id == null) return;
StanzaNode? delay_node = message.stanza.get_deep_subnode(mam_flag.ns_ver + ":result", "urn:xmpp:forward:0:forwarded", "urn:xmpp:delay:delay");
if (delay_node == null) {
warning("MAM result did not contain delayed time %s", message.stanza.to_string());
return;
}
DateTime? time = DelayedDelivery.get_time_for_node(delay_node);
if (time == null) return;
mam_times[account][id] = time;
string? query_id = message.stanza.get_deep_attribute(mam_flag.ns_ver + ":result", mam_flag.ns_ver + ":queryid");
if (query_id != null && id == catchup_until_id[account]) {
debug("MAM: [%s] Hitted range (id) %s", account.bare_jid.to_string(), id);
hitted_range[query_id] = -2;
}
});
stream_interactor.module_manager.get_module(account, Xmpp.MessageModule.IDENTITY).received_error.connect((stream, message_stanza, error_stanza) => {
Message? message = null;
@ -111,20 +164,203 @@ public class MessageProcessor : StreamInteractionModule, Object {
convert_sending_to_unsent_msgs(account);
}
private async void on_message_received(Account account, Xmpp.MessageStanza message_stanza) {
private async void do_mam_catchup(Account account) {
debug("MAM: [%s] Start catchup", account.bare_jid.to_string());
string? earliest_id = null;
DateTime? earliest_time = null;
bool continue_sync = true;
// If it's a message from MAM, it's going to be processed by HistorySync which calls run_pipeline_announce later.
if (history_sync.process(account, message_stanza)) return;
while (continue_sync) {
continue_sync = false;
run_pipeline_announce.begin(account, message_stanza);
// Get previous row
var previous_qry = db.mam_catchup.select().with(db.mam_catchup.account_id, "=", account.id).order_by(db.mam_catchup.to_time, "DESC");
if (current_catchup_id.has_key(account)) {
previous_qry.with(db.mam_catchup.id, "!=", current_catchup_id[account]);
}
RowOption previous_row = previous_qry.single().row();
if (previous_row.is_present()) {
catchup_until_id[account] = previous_row[db.mam_catchup.to_id];
catchup_until_time[account] = (new DateTime.from_unix_utc(previous_row[db.mam_catchup.to_time])).add_minutes(-5);
debug("MAM: [%s] Previous entry exists", account.bare_jid.to_string());
} else {
catchup_until_id.unset(account);
catchup_until_time.unset(account);
}
string query_id = Xmpp.random_uuid();
yield get_mam_range(account, query_id, null, null, earliest_time, earliest_id);
if (!hitted_range.has_key(query_id)) {
debug("MAM: [%s] Set catchup end reached", account.bare_jid.to_string());
db.mam_catchup.update()
.set(db.mam_catchup.from_end, true)
.with(db.mam_catchup.id, "=", current_catchup_id[account])
.perform();
}
if (hitted_range.has_key(query_id)) {
if (merge_ranges(account, null)) {
RowOption current_row = db.mam_catchup.row_with(db.mam_catchup.id, current_catchup_id[account]);
bool range_from_complete = current_row[db.mam_catchup.from_end];
if (!range_from_complete) {
continue_sync = true;
earliest_id = current_row[db.mam_catchup.from_id];
earliest_time = (new DateTime.from_unix_utc(current_row[db.mam_catchup.from_time])).add_seconds(1);
}
}
}
}
}
public async void run_pipeline_announce(Account account, Xmpp.MessageStanza message_stanza) {
/*
* Merges the row with `current_catchup_id` with the previous range (optional: with `earlier_id`)
* Changes `current_catchup_id` to the previous range
*/
private bool merge_ranges(Account account, int? earlier_id) {
RowOption current_row = db.mam_catchup.row_with(db.mam_catchup.id, current_catchup_id[account]);
RowOption previous_row = null;
if (earlier_id != null) {
previous_row = db.mam_catchup.row_with(db.mam_catchup.id, earlier_id);
} else {
previous_row = db.mam_catchup.select()
.with(db.mam_catchup.account_id, "=", account.id)
.with(db.mam_catchup.id, "!=", current_catchup_id[account])
.order_by(db.mam_catchup.to_time, "DESC").single().row();
}
if (!previous_row.is_present()) {
debug("MAM: [%s] Merging: No previous row", account.bare_jid.to_string());
return false;
}
var qry = db.mam_catchup.update().with(db.mam_catchup.id, "=", previous_row[db.mam_catchup.id]);
debug("MAM: [%s] Merging %ld-%ld with %ld- %ld", account.bare_jid.to_string(), previous_row[db.mam_catchup.from_time], previous_row[db.mam_catchup.to_time], current_row[db.mam_catchup.from_time], current_row[db.mam_catchup.to_time]);
if (current_row[db.mam_catchup.from_time] < previous_row[db.mam_catchup.from_time]) {
qry.set(db.mam_catchup.from_id, current_row[db.mam_catchup.from_id])
.set(db.mam_catchup.from_time, current_row[db.mam_catchup.from_time]);
}
if (current_row[db.mam_catchup.to_time] > previous_row[db.mam_catchup.to_time]) {
qry.set(db.mam_catchup.to_id, current_row[db.mam_catchup.to_id])
.set(db.mam_catchup.to_time, current_row[db.mam_catchup.to_time]);
}
qry.perform();
current_catchup_id[account] = previous_row[db.mam_catchup.id];
db.mam_catchup.delete().with(db.mam_catchup.id, "=", current_row[db.mam_catchup.id]).perform();
return true;
}
private async bool get_mam_range(Account account, string? query_id, DateTime? from_time, string? from_id, DateTime? to_time, string? to_id) {
debug("MAM: [%s] Get range %s - %s", account.bare_jid.to_string(), from_time != null ? from_time.to_string() : "", to_time != null ? to_time.to_string() : "");
XmppStream stream = stream_interactor.get_stream(account);
Iq.Stanza? iq = yield stream.get_module(Xep.MessageArchiveManagement.Module.IDENTITY).query_archive(stream, null, query_id, from_time, from_id, to_time, to_id);
if (iq == null) {
debug(@"MAM: [%s] IQ null", account.bare_jid.to_string());
return true;
}
if (iq.stanza.get_deep_string_content("urn:xmpp:mam:2:fin", "http://jabber.org/protocol/rsm" + ":set", "first") == null) {
return true;
}
while (iq != null) {
string? earliest_id = iq.stanza.get_deep_string_content("urn:xmpp:mam:2:fin", "http://jabber.org/protocol/rsm" + ":set", "first");
if (earliest_id == null) return true;
string? latest_id = iq.stanza.get_deep_string_content("urn:xmpp:mam:2:fin", "http://jabber.org/protocol/rsm" + ":set", "last");
// We wait until all the messages from the page are processed (and we got the `mam_times` from them)
Idle.add(get_mam_range.callback, Priority.LOW);
yield;
int wait_ms = 1000;
if (mam_times[account].has_key(earliest_id) && (current_catchup_id.has_key(account) || mam_times[account].has_key(latest_id))) {
debug("MAM: [%s] Update from_id %s", account.bare_jid.to_string(), earliest_id);
if (!current_catchup_id.has_key(account)) {
debug("MAM: [%s] We get our first MAM page", account.bare_jid.to_string());
current_catchup_id[account] = (int) db.mam_catchup.insert()
.value(db.mam_catchup.account_id, account.id)
.value(db.mam_catchup.from_id, earliest_id)
.value(db.mam_catchup.from_time, (long)mam_times[account][earliest_id].to_unix())
.value(db.mam_catchup.to_id, latest_id)
.value(db.mam_catchup.to_time, (long)mam_times[account][latest_id].to_unix())
.perform();
} else {
// Update existing id
db.mam_catchup.update()
.set(db.mam_catchup.from_id, earliest_id)
.set(db.mam_catchup.from_time, (long)mam_times[account][earliest_id].to_unix())
.with(db.mam_catchup.id, "=", current_catchup_id[account])
.perform();
}
TimeSpan catchup_time_ago = (new DateTime.now_utc()).difference(mam_times[account][earliest_id]);
if (catchup_time_ago > 14 * TimeSpan.DAY) {
wait_ms = 2000;
} else if (catchup_time_ago > 5 * TimeSpan.DAY) {
wait_ms = 1000;
} else if (catchup_time_ago > 2 * TimeSpan.DAY) {
wait_ms = 200;
} else if (catchup_time_ago > TimeSpan.DAY) {
wait_ms = 50;
} else {
wait_ms = 10;
}
} else {
warning("Didn't have time for MAM id; earliest_id:%s latest_id:%s", mam_times[account].has_key(earliest_id).to_string(), mam_times[account].has_key(latest_id).to_string());
}
mam_times[account] = new HashMap<string, DateTime>();
Timeout.add(wait_ms, () => {
if (hitted_range.has_key(query_id)) {
debug(@"MAM: [%s] Hitted contains key %s", account.bare_jid.to_string(), query_id);
iq = null;
Idle.add(get_mam_range.callback);
return false;
}
stream.get_module(Xep.MessageArchiveManagement.Module.IDENTITY).page_through_results.begin(stream, null, query_id, from_time, to_time, iq, (_, res) => {
iq = stream.get_module(Xep.MessageArchiveManagement.Module.IDENTITY).page_through_results.end(res);
Idle.add(get_mam_range.callback);
});
return false;
});
yield;
}
return false;
}
private async void on_message_received(Account account, Xmpp.MessageStanza message_stanza) {
Entities.Message message = yield parse_message_stanza(account, message_stanza);
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation_for_message(message);
if (conversation == null) return;
// MAM state database update
Xep.MessageArchiveManagement.MessageFlag? mam_flag = Xep.MessageArchiveManagement.MessageFlag.get_flag(message_stanza);
if (mam_flag == null) {
if (current_catchup_id.has_key(account)) {
string? stanza_id = UniqueStableStanzaIDs.get_stanza_id(message_stanza, account.bare_jid);
if (stanza_id != null) {
db.mam_catchup.update()
.with(db.mam_catchup.id, "=", current_catchup_id[account])
.set(db.mam_catchup.to_time, (long)message.local_time.to_unix())
.set(db.mam_catchup.to_id, stanza_id)
.perform();
}
}
}
bool abort = yield received_pipeline.run(message, message_stanza, conversation);
if (abort) return;
@ -137,7 +373,7 @@ public class MessageProcessor : StreamInteractionModule, Object {
message_sent_or_received(message, conversation);
}
public async Entities.Message parse_message_stanza(Account account, Xmpp.MessageStanza message) {
private async Entities.Message parse_message_stanza(Account account, Xmpp.MessageStanza message) {
string? body = message.body;
if (body != null) body = body.strip();
Entities.Message new_message = new Entities.Message(body);
@ -156,22 +392,21 @@ public class MessageProcessor : StreamInteractionModule, Object {
new_message.counterpart = counterpart_override ?? (new_message.direction == Entities.Message.DIRECTION_SENT ? message.to : message.from);
new_message.ourpart = new_message.direction == Entities.Message.DIRECTION_SENT ? message.from : message.to;
Xmpp.MessageArchiveManagement.MessageFlag? mam_message_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message);
XmppStream? stream = stream_interactor.get_stream(account);
Xep.MessageArchiveManagement.MessageFlag? mam_message_flag = Xep.MessageArchiveManagement.MessageFlag.get_flag(message);
Xep.MessageArchiveManagement.Flag? mam_flag = stream != null ? stream.get_flag(Xep.MessageArchiveManagement.Flag.IDENTITY) : null;
EntityInfo entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
if (mam_message_flag != null && mam_message_flag.mam_id != null) {
bool server_does_mam = entity_info.has_feature_cached(account, account.bare_jid, Xmpp.MessageArchiveManagement.NS_URI);
if (server_does_mam) {
new_message.server_id = mam_message_flag.mam_id;
}
if (mam_message_flag != null && mam_flag != null && mam_flag.ns_ver == Xep.MessageArchiveManagement.NS_URI_2 && mam_message_flag.mam_id != null) {
new_message.server_id = mam_message_flag.mam_id;
} else if (message.type_ == Xmpp.MessageStanza.TYPE_GROUPCHAT) {
bool server_supports_sid = (yield entity_info.has_feature(account, new_message.counterpart.bare_jid, Xep.UniqueStableStanzaIDs.NS_URI)) ||
(yield entity_info.has_feature(account, new_message.counterpart.bare_jid, Xmpp.MessageArchiveManagement.NS_URI));
(yield entity_info.has_feature(account, new_message.counterpart.bare_jid, Xep.MessageArchiveManagement.NS_URI_2));
if (server_supports_sid) {
new_message.server_id = Xep.UniqueStableStanzaIDs.get_stanza_id(message, new_message.counterpart.bare_jid);
}
} else if (message.type_ == Xmpp.MessageStanza.TYPE_CHAT) {
bool server_supports_sid = (yield entity_info.has_feature(account, account.bare_jid, Xep.UniqueStableStanzaIDs.NS_URI)) ||
(yield entity_info.has_feature(account, account.bare_jid, Xmpp.MessageArchiveManagement.NS_URI));
(yield entity_info.has_feature(account, account.bare_jid, Xep.MessageArchiveManagement.NS_URI_2));
if (server_supports_sid) {
new_message.server_id = Xep.UniqueStableStanzaIDs.get_stanza_id(message, account.bare_jid);
}
@ -222,67 +457,6 @@ public class MessageProcessor : StreamInteractionModule, Object {
return Entities.Message.Type.CHAT;
}
// Decides whether an incoming message is already stored locally.
// Three strategies are tried in order:
//   1. exact match on the server-assigned archive id (server_id),
//   2. exact match on a client-generated UUID stanza id (plus resource),
//   3. fuzzy match on body + counterpart + a +/- 1 minute time window.
// Returns true if the message should be dropped as a duplicate.
private bool is_duplicate(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
Account account = conversation.account;
// Deduplicate by server_id
if (message.server_id != null) {
QueryBuilder builder = db.message.select()
.with(db.message.server_id, "=", message.server_id)
.with(db.message.counterpart_id, "=", db.get_jid_id(message.counterpart))
.with(db.message.account_id, "=", account.id);
// If the message is a duplicate
if (builder.count() > 0) {
// Let the history sync know it hit an already-known archive id,
// so it can stop paging further into the archive.
history_sync.on_server_id_duplicate(account, stanza, message);
return true;
}
}
// Deduplicate messages by uuid
// Only trust stanza ids that look like RFC 4122 UUIDs; arbitrary ids from
// other clients are not unique enough to dedup on alone.
bool is_uuid = message.stanza_id != null && Regex.match_simple("""[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12}""", message.stanza_id);
if (is_uuid) {
QueryBuilder builder = db.message.select()
.with(db.message.stanza_id, "=", message.stanza_id)
.with(db.message.counterpart_id, "=", db.get_jid_id(message.counterpart))
.with(db.message.account_id, "=", account.id);
// Additionally pin the resource: received messages are distinguished by
// the counterpart's resource, sent messages by our own resource.
if (message.direction == Message.DIRECTION_RECEIVED) {
if (message.counterpart.resourcepart != null) {
builder.with(db.message.counterpart_resource, "=", message.counterpart.resourcepart);
} else {
builder.with_null(db.message.counterpart_resource);
}
} else if (message.direction == Message.DIRECTION_SENT) {
if (message.ourpart.resourcepart != null) {
builder.with(db.message.our_resource, "=", message.ourpart.resourcepart);
} else {
builder.with_null(db.message.our_resource);
}
}
bool duplicate = builder.single().row().is_present();
return duplicate;
}
// Deduplicate messages based on content and metadata
// Fallback for non-UUID ids: same account, counterpart, body and stanza id,
// with the stored timestamp within one minute of this message's time.
QueryBuilder builder = db.message.select()
.with(db.message.account_id, "=", account.id)
.with(db.message.counterpart_id, "=", db.get_jid_id(message.counterpart))
.with(db.message.body, "=", message.body)
.with(db.message.time, "<", (long) message.time.add_minutes(1).to_unix())
.with(db.message.time, ">", (long) message.time.add_minutes(-1).to_unix());
if (message.stanza_id != null) {
builder.with(db.message.stanza_id, "=", message.stanza_id);
} else {
builder.with_null(db.message.stanza_id);
}
if (message.counterpart.resourcepart != null) {
builder.with(db.message.counterpart_resource, "=", message.counterpart.resourcepart);
} else {
builder.with_null(db.message.counterpart_resource);
}
return builder.count() > 0;
}
private class DeduplicateMessageListener : MessageListener {
public string[] after_actions_const = new string[]{ "FILTER_EMPTY", "MUC" };
@ -290,13 +464,85 @@ public class MessageProcessor : StreamInteractionModule, Object {
public override string[] after_actions { get { return after_actions_const; } }
private MessageProcessor outer;
private Database db;
public DeduplicateMessageListener(MessageProcessor outer) {
public DeduplicateMessageListener(MessageProcessor outer, Database db) {
this.outer = outer;
this.db = db;
}
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
return outer.is_duplicate(message, stanza, conversation);
Account account = conversation.account;
Xep.MessageArchiveManagement.MessageFlag? mam_flag = Xep.MessageArchiveManagement.MessageFlag.get_flag(stanza);
// Deduplicate by server_id
if (message.server_id != null) {
QueryBuilder builder = db.message.select()
.with(db.message.server_id, "=", message.server_id)
.with(db.message.counterpart_id, "=", db.get_jid_id(message.counterpart))
.with(db.message.account_id, "=", account.id);
bool duplicate = builder.count() > 0;
if (duplicate && mam_flag != null) {
debug(@"MAM: [%s] Hitted range duplicate server id. id %s qid %s", account.bare_jid.to_string(), message.server_id, mam_flag.query_id);
if (outer.catchup_until_time.has_key(account) && mam_flag.server_time.compare(outer.catchup_until_time[account]) < 0) {
outer.hitted_range[mam_flag.query_id] = -1;
debug(@"MAM: [%s] In range (time) %s < %s", account.bare_jid.to_string(), mam_flag.server_time.to_string(), outer.catchup_until_time[account].to_string());
}
}
if (duplicate) return true;
}
// Deduplicate messages by uuid
bool is_uuid = message.stanza_id != null && Regex.match_simple("""[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12}""", message.stanza_id);
if (is_uuid) {
QueryBuilder builder = db.message.select()
.with(db.message.stanza_id, "=", message.stanza_id)
.with(db.message.counterpart_id, "=", db.get_jid_id(message.counterpart))
.with(db.message.account_id, "=", account.id);
if (message.direction == Message.DIRECTION_RECEIVED) {
if (message.counterpart.resourcepart != null) {
builder.with(db.message.counterpart_resource, "=", message.counterpart.resourcepart);
} else {
builder.with_null(db.message.counterpart_resource);
}
} else if (message.direction == Message.DIRECTION_SENT) {
if (message.ourpart.resourcepart != null) {
builder.with(db.message.our_resource, "=", message.ourpart.resourcepart);
} else {
builder.with_null(db.message.our_resource);
}
}
RowOption row_opt = builder.single().row();
bool duplicate = row_opt.is_present();
if (duplicate && mam_flag != null && row_opt[db.message.server_id] == null &&
outer.catchup_until_time.has_key(account) && mam_flag.server_time.compare(outer.catchup_until_time[account]) > 0) {
outer.hitted_range[mam_flag.query_id] = -1;
debug(@"MAM: [%s] Hitted range duplicate message id. id %s qid %s", account.bare_jid.to_string(), message.stanza_id, mam_flag.query_id);
}
return duplicate;
}
// Deduplicate messages based on content and metadata
QueryBuilder builder = db.message.select()
.with(db.message.account_id, "=", account.id)
.with(db.message.counterpart_id, "=", db.get_jid_id(message.counterpart))
.with(db.message.body, "=", message.body)
.with(db.message.time, "<", (long) message.time.add_minutes(1).to_unix())
.with(db.message.time, ">", (long) message.time.add_minutes(-1).to_unix());
if (message.stanza_id != null) {
builder.with(db.message.stanza_id, "=", message.stanza_id);
} else {
builder.with_null(db.message.stanza_id);
}
if (message.counterpart.resourcepart != null) {
builder.with(db.message.counterpart_resource, "=", message.counterpart.resourcepart);
} else {
builder.with_null(db.message.counterpart_resource);
}
return builder.count() > 0;
}
}
@ -307,8 +553,7 @@ public class MessageProcessor : StreamInteractionModule, Object {
public override string[] after_actions { get { return after_actions_const; } }
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
return message.body == null &&
Xep.StatelessFileSharing.get_file_shares(stanza) == null;
return (message.body == null);
}
}
@ -318,39 +563,19 @@ public class MessageProcessor : StreamInteractionModule, Object {
public override string action_group { get { return "STORE"; } }
public override string[] after_actions { get { return after_actions_const; } }
private MessageProcessor outer;
private StreamInteractor stream_interactor;
public StoreMessageListener(MessageProcessor outer, StreamInteractor stream_interactor) {
this.outer = outer;
public StoreMessageListener(StreamInteractor stream_interactor) {
this.stream_interactor = stream_interactor;
}
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
if (message.body == null) return true;
stream_interactor.get_module(MessageStorage.IDENTITY).add_message(message, conversation);
return false;
}
}
// Persists message markup spans for a message after it has been stored.
private class MarkupListener : MessageListener {
    public string[] after_actions_const = new string[]{ "STORE" };
    public override string action_group { get { return "Markup"; } }
    public override string[] after_actions { get { return after_actions_const; } }

    private StreamInteractor stream_interactor;

    public MarkupListener(StreamInteractor stream_interactor) {
        this.stream_interactor = stream_interactor;
    }

    public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
        // Extract the markup spans carried by the stanza and attach them to the stored message.
        message.persist_markups(MessageMarkup.get_spans(stanza), message.id);
        return false;
    }
}
private class StoreContentItemListener : MessageListener {
public string[] after_actions_const = new string[]{ "DEDUPLICATE", "DECRYPT", "FILTER_EMPTY", "STORE", "CORRECTION", "MESSAGE_REINTERPRETING" };
@ -370,7 +595,30 @@ public class MessageProcessor : StreamInteractionModule, Object {
}
}
public Entities.Message create_out_message(string? text, Conversation conversation) {
// Tracks how far back the message archive has been synced for an account.
private class MamMessageListener : MessageListener {
    public string[] after_actions_const = new string[]{ "DEDUPLICATE" };
    public override string action_group { get { return "MAM_NODE"; } }
    public override string[] after_actions { get { return after_actions_const; } }

    private StreamInteractor stream_interactor;

    public MamMessageListener(StreamInteractor stream_interactor) {
        this.stream_interactor = stream_interactor;
    }

    public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
        XmppStream? stream = stream_interactor.get_stream(conversation.account);
        Xep.MessageArchiveManagement.Flag? mam_flag = stream != null ? stream.get_flag(Xep.MessageArchiveManagement.Flag.IDENTITY) : null;
        // The earliest-synced marker advances when the message came out of the
        // archive itself, or when the stream's archive state reports catch-up.
        bool caught_up = mam_flag != null && mam_flag.cought_up == true;
        if (caught_up || Xep.MessageArchiveManagement.MessageFlag.get_flag(stanza) != null) {
            conversation.account.mam_earliest_synced = message.local_time;
        }
        return false;
    }
}
public Entities.Message create_out_message(string text, Conversation conversation) {
Entities.Message message = new Entities.Message(text);
message.type_ = Util.get_message_type_for_conversation(conversation);
message.stanza_id = random_uuid();
@ -412,24 +660,6 @@ public class MessageProcessor : StreamInteractionModule, Object {
} else {
new_message.type_ = MessageStanza.TYPE_CHAT;
}
if (message.quoted_item_id != 0) {
ContentItem? quoted_content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_id(conversation, message.quoted_item_id);
if (quoted_content_item != null) {
Jid? quoted_sender = message.from;
string? quoted_stanza_id = stream_interactor.get_module(ContentItemStore.IDENTITY).get_message_id_for_content_item(conversation, quoted_content_item);
if (quoted_sender != null && quoted_stanza_id != null) {
Xep.Replies.set_reply_to(new_message, new Xep.Replies.ReplyTo(quoted_sender, quoted_stanza_id));
}
foreach (var fallback in message.get_fallbacks()) {
Xep.FallbackIndication.set_fallback(new_message, fallback);
}
}
}
MessageMarkup.add_spans(new_message, message.get_markups());
build_message_stanza(message, new_message, conversation);
pre_message_send(message, new_message, conversation);
if (message.marked == Entities.Message.Marked.UNSENT || message.marked == Entities.Message.Marked.WONTSEND) return;
@ -449,10 +679,6 @@ public class MessageProcessor : StreamInteractionModule, Object {
}
}
if (conversation.get_send_typing_setting(stream_interactor) == Conversation.Setting.ON) {
ChatStateNotifications.add_state_to_message(new_message, ChatStateNotifications.STATE_ACTIVE);
}
stream.get_module(MessageModule.IDENTITY).send_message.begin(stream, new_message, (_, res) => {
try {
stream.get_module(MessageModule.IDENTITY).send_message.end(res);
@ -465,7 +691,7 @@ public class MessageProcessor : StreamInteractionModule, Object {
if (!conversation.type_.is_muc_semantic() && current_own_jid != null && !current_own_jid.equals(message.ourpart)) {
message.ourpart = current_own_jid;
}
} catch (IOError e) {
} catch (IOStreamError e) {
message.marked = Entities.Message.Marked.UNSENT;
if (stream != stream_interactor.get_stream(conversation.account)) {

View File

@ -42,7 +42,6 @@ public class MessageStorage : StreamInteractionModule, Object {
.with(db.message.type_, "=", (int) Util.get_message_type_for_conversation(conversation))
.order_by(db.message.time, "DESC")
.outer_join_with(db.message_correction, db.message_correction.message_id, db.message.id)
.outer_join_with(db.reply, db.reply.message_id, db.message.id)
.limit(count);
Gee.List<Message> ret = new LinkedList<Message>(Message.equals_func);
@ -93,20 +92,11 @@ public class MessageStorage : StreamInteractionModule, Object {
RowOption row_option = db.message.select().with(db.message.id, "=", id)
.outer_join_with(db.message_correction, db.message_correction.message_id, db.message.id)
.outer_join_with(db.reply, db.reply.message_id, db.message.id)
.row();
return create_message_from_row_opt(row_option, conversation);
}
// Resolves a message reference id: direct chats reference messages by their
// stanza id, group chats by the server-assigned id.
public Message? get_message_by_referencing_id(string id, Conversation conversation) {
    var storage = stream_interactor.get_module(MessageStorage.IDENTITY);
    return conversation.type_ == Conversation.Type.CHAT
            ? storage.get_message_by_stanza_id(id, conversation)
            : storage.get_message_by_server_id(id, conversation);
}
public Message? get_message_by_stanza_id(string stanza_id, Conversation conversation) {
if (messages_by_stanza_id.has_key(conversation)) {
Message? message = messages_by_stanza_id[conversation][stanza_id];
@ -121,10 +111,11 @@ public class MessageStorage : StreamInteractionModule, Object {
.with(db.message.type_, "=", (int) Util.get_message_type_for_conversation(conversation))
.with(db.message.stanza_id, "=", stanza_id)
.order_by(db.message.time, "DESC")
.outer_join_with(db.message_correction, db.message_correction.message_id, db.message.id)
.outer_join_with(db.reply, db.reply.message_id, db.message.id);
.outer_join_with(db.message_correction, db.message_correction.message_id, db.message.id);
if (conversation.counterpart.resourcepart != null) {
if (conversation.counterpart.resourcepart == null) {
query.with_null(db.message.counterpart_resource);
} else {
query.with(db.message.counterpart_resource, "=", conversation.counterpart.resourcepart);
}
@ -147,8 +138,7 @@ public class MessageStorage : StreamInteractionModule, Object {
.with(db.message.type_, "=", (int) Util.get_message_type_for_conversation(conversation))
.with(db.message.server_id, "=", server_id)
.order_by(db.message.time, "DESC")
.outer_join_with(db.message_correction, db.message_correction.message_id, db.message.id)
.outer_join_with(db.reply, db.reply.message_id, db.message.id);
.outer_join_with(db.message_correction, db.message_correction.message_id, db.message.id);
if (conversation.counterpart.resourcepart == null) {
query.with_null(db.message.counterpart_resource);
@ -199,16 +189,6 @@ public class MessageStorage : StreamInteractionModule, Object {
message_refs.remove_at(message_refs.size - 1);
}
}
// Returns the id other messages should use to reference this message:
// a correction targets the original it edits; otherwise direct chats use
// the stanza id and group chats use the server id.
public static string? get_reference_id(Message message) {
    if (message.edit_to != null) return message.edit_to;
    return message.type_ == Message.Type.CHAT ? message.stanza_id : message.server_id;
}
}
}

View File

@ -24,7 +24,7 @@ public class ModuleManager {
return null;
}
public ArrayList<XmppStreamModule> get_modules(Account account) {
public ArrayList<XmppStreamModule> get_modules(Account account, string? resource = null) {
ArrayList<XmppStreamModule> modules = new ArrayList<XmppStreamModule>();
lock (module_map) {
@ -34,7 +34,7 @@ public class ModuleManager {
foreach (XmppStreamModule module in module_map[account]) {
if (module.get_id() == Bind.Module.IDENTITY.id) {
((Bind.Module) module).requested_resource = account.resourcepart;
((Bind.Module) module).requested_resource = resource ?? account.resourcepart;
} else if (module.get_id() == Sasl.Module.IDENTITY.id) {
((Sasl.Module) module).password = account.password;
}
@ -57,9 +57,8 @@ public class ModuleManager {
module_map[account].add(new Xep.Bookmarks2.Module());
module_map[account].add(new Presence.Module());
module_map[account].add(new Xmpp.MessageModule());
module_map[account].add(new Xmpp.MessageArchiveManagement.Module());
module_map[account].add(new Xep.MessageArchiveManagement.Module());
module_map[account].add(new Xep.MessageCarbons.Module());
module_map[account].add(new Xep.BitsOfBinary.Module());
module_map[account].add(new Xep.Muc.Module());
module_map[account].add(new Xep.Pubsub.Module());
module_map[account].add(new Xep.MessageDeliveryReceipts.Module());
@ -71,7 +70,6 @@ public class ModuleManager {
module_map[account].add(new StreamError.Module());
module_map[account].add(new Xep.InBandRegistration.Module());
module_map[account].add(new Xep.HttpFileUpload.Module());
module_map[account].add(new Xep.Reactions.Module());
module_map[account].add(new Xep.Socks5Bytestreams.Module());
module_map[account].add(new Xep.InBandBytestreams.Module());
module_map[account].add(new Xep.Jingle.Module());
@ -82,7 +80,6 @@ public class ModuleManager {
module_map[account].add(new Xep.LastMessageCorrection.Module());
module_map[account].add(new Xep.DirectMucInvitations.Module());
module_map[account].add(new Xep.JingleMessageInitiation.Module());
module_map[account].add(new Xep.OccupantIds.Module());
module_map[account].add(new Xep.JingleRawUdp.Module());
module_map[account].add(new Xep.Muji.Module());
module_map[account].add(new Xep.CallInvites.Module());

View File

@ -21,15 +21,13 @@ public class MucManager : StreamInteractionModule, Object {
public signal void conference_removed(Account account, Jid jid);
private StreamInteractor stream_interactor;
private HashMap<Account, HashSet<Jid>> mucs_joined = new HashMap<Account, HashSet<Jid>>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashSet<Jid>> mucs_todo = new HashMap<Account, HashSet<Jid>>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashSet<Jid>> mucs_joining = new HashMap<Account, HashSet<Jid>>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashMap<Jid, Cancellable>> mucs_sync_cancellables = new HashMap<Account, HashMap<Jid, Cancellable>>(Account.hash_func, Account.equals_func);
private HashMap<Jid, Xep.Muc.MucEnterError> enter_errors = new HashMap<Jid, Xep.Muc.MucEnterError>(Jid.hash_func, Jid.equals_func);
private ReceivedMessageListener received_message_listener;
private HashMap<Account, BookmarksProvider> bookmarks_provider = new HashMap<Account, BookmarksProvider>(Account.hash_func, Account.equals_func);
private HashMap<Account, Gee.List<Jid>> invites = new HashMap<Account, Gee.List<Jid>>(Account.hash_func, Account.equals_func);
public HashMap<Account, Jid> default_muc_server = new HashMap<Account, Jid>(Account.hash_func, Account.equals_func);
private HashMap<Account, HashMap<Jid, string>> own_occupant_ids = new HashMap<Account, HashMap<Jid, string>>(Account.hash_func, Account.equals_func);
public static void start(StreamInteractor stream_interactor) {
MucManager m = new MucManager(stream_interactor);
@ -54,11 +52,10 @@ public class MucManager : StreamInteractionModule, Object {
}
return true;
});
stream_interactor.get_module(MessageProcessor.IDENTITY).build_message_stanza.connect(on_build_message_stanza);
}
// already_autojoin: Without this flag we'd be retrieving bookmarks (to check for autojoin) from the sender on every join
public async Muc.JoinResult? join(Account account, Jid jid, string? nick, string? password, bool already_autojoin = false, Cancellable? cancellable = null) {
public async Muc.JoinResult? join(Account account, Jid jid, string? nick, string? password, bool already_autojoin = false) {
XmppStream? stream = stream_interactor.get_stream(account);
if (stream == null) return null;
@ -71,20 +68,17 @@ public class MucManager : StreamInteractionModule, Object {
if (last_message != null) history_since = last_message.time;
}
bool receive_history = true;
EntityInfo entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
bool can_do_mam = yield entity_info.has_feature(account, jid, Xmpp.MessageArchiveManagement.NS_URI);
if (can_do_mam) {
receive_history = false;
history_since = null;
}
if (!mucs_joining.has_key(account)) {
mucs_joining[account] = new HashSet<Jid>(Jid.hash_bare_func, Jid.equals_bare_func);
}
mucs_joining[account].add(jid);
Muc.JoinResult? res = yield stream.get_module(Xep.Muc.Module.IDENTITY).enter(stream, jid.bare_jid, nick_, password, history_since, receive_history, null);
if (!mucs_todo.has_key(account)) {
mucs_todo[account] = new HashSet<Jid>(Jid.hash_bare_func, Jid.equals_bare_func);
}
mucs_todo[account].add(jid.with_resource(nick_));
Muc.JoinResult? res = yield stream.get_module(Xep.Muc.Module.IDENTITY).enter(stream, jid.bare_jid, nick_, password, history_since, null);
mucs_joining[account].remove(jid);
@ -97,60 +91,26 @@ public class MucManager : StreamInteractionModule, Object {
Conversation joined_conversation = stream_interactor.get_module(ConversationManager.IDENTITY).create_conversation(jid, account, Conversation.Type.GROUPCHAT);
joined_conversation.nickname = nick;
stream_interactor.get_module(ConversationManager.IDENTITY).start_conversation(joined_conversation);
if (can_do_mam) {
var history_sync = stream_interactor.get_module(MessageProcessor.IDENTITY).history_sync;
if (conversation == null) {
// We never joined the conversation before, just fetch the latest MAM page
yield history_sync.fetch_latest_page(account, jid.bare_jid, null, new DateTime.from_unix_utc(0), cancellable);
} else {
// Fetch everything up to the last time the user actively joined
if (!mucs_sync_cancellables.has_key(account)) {
mucs_sync_cancellables[account] = new HashMap<Jid, Cancellable>();
}
if (!mucs_sync_cancellables[account].has_key(jid.bare_jid)) {
mucs_sync_cancellables[account][jid.bare_jid] = new Cancellable();
history_sync.fetch_everything.begin(account, jid.bare_jid, mucs_sync_cancellables[account][jid.bare_jid], conversation.active_last_changed, (_, res) => {
history_sync.fetch_everything.end(res);
mucs_sync_cancellables[account].unset(jid.bare_jid);
});
}
}
}
} else if (res.muc_error != null) {
// Join failed
enter_errors[jid] = res.muc_error;
}
if (!mucs_joined.has_key(account)) {
mucs_joined[account] = new HashSet<Jid>(Jid.hash_bare_func, Jid.equals_bare_func);
}
mucs_joined[account].add(jid.with_resource(res.nick ?? nick_));
return res;
}
public void part(Account account, Jid jid) {
if (mucs_joined.has_key(account) && mucs_joined[account].contains(jid)) {
mucs_joined[account].remove(jid);
}
if (!mucs_todo.has_key(account) || !mucs_todo[account].contains(jid)) return;
mucs_todo[account].remove(jid);
XmppStream? stream = stream_interactor.get_stream(account);
if (stream != null) {
unset_autojoin(account, stream, jid);
stream.get_module(Xep.Muc.Module.IDENTITY).exit(stream, jid.bare_jid);
}
if (stream == null) return;
unset_autojoin(account, stream, jid);
stream.get_module(Xep.Muc.Module.IDENTITY).exit(stream, jid.bare_jid);
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(jid, account, Conversation.Type.GROUPCHAT);
Conversation? conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation(jid, account);
if (conversation != null) stream_interactor.get_module(ConversationManager.IDENTITY).close_conversation(conversation);
cancel_sync(account, jid);
}
// Cancels a still-running history sync for the given MUC, if any.
private void cancel_sync(Account account, Jid jid) {
    if (!mucs_sync_cancellables.has_key(account)) return;
    if (!mucs_sync_cancellables[account].has_key(jid.bare_jid)) return;
    Cancellable cancellable = mucs_sync_cancellables[account][jid.bare_jid];
    if (!cancellable.is_cancelled()) {
        cancellable.cancel();
    }
}
public async DataForms.DataForm? get_config_form(Account account, Jid jid) {
@ -183,9 +143,9 @@ public class MucManager : StreamInteractionModule, Object {
conversation.nickname = new_nick;
if (mucs_joined.has_key(conversation.account)) {
mucs_joined[conversation.account].remove(conversation.counterpart);
mucs_joined[conversation.account].add(conversation.counterpart.with_resource(new_nick));
if (mucs_todo.has_key(conversation.account)) {
mucs_todo[conversation.account].remove(conversation.counterpart);
mucs_todo[conversation.account].add(conversation.counterpart.with_resource(new_nick));
}
// Update nick in bookmark
@ -233,8 +193,15 @@ public class MucManager : StreamInteractionModule, Object {
//the term `private room` is a short hand for members-only+non-anonymous rooms
public bool is_private_room(Account account, Jid jid) {
var entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
return entity_info.has_feature_offline(account, jid, "muc_membersonly") && entity_info.has_feature_offline(account, jid, "muc_nonanonymous");
XmppStream? stream = stream_interactor.get_stream(account);
if (stream == null) {
return false;
}
Xep.Muc.Flag? flag = stream.get_flag(Xep.Muc.Flag.IDENTITY);
if (flag == null) {
return false;
}
return flag.has_room_feature(jid, Xep.Muc.Feature.NON_ANONYMOUS) && flag.has_room_feature(jid, Xep.Muc.Feature.MEMBERS_ONLY);
}
public bool is_moderated_room(Account account, Jid jid) {
@ -335,14 +302,6 @@ public class MucManager : StreamInteractionModule, Object {
return null;
}
// Maps a real JID back to its occupant JID inside the given room,
// or null when MUC state for the account is unavailable.
public Jid? get_occupant_jid(Account account, Jid room, Jid occupant_real_jid) {
    Xep.Muc.Flag? flag = get_muc_flag(account);
    if (flag == null) return null;
    return flag.get_occupant_jid(occupant_real_jid, room);
}
public Xep.Muc.Role? get_role(Jid jid, Account account) {
Xep.Muc.Flag? flag = get_muc_flag(account);
if (flag != null) {
@ -406,16 +365,8 @@ public class MucManager : StreamInteractionModule, Object {
return get_own_jid(jid, account) != null;
}
// Returns the occupant id this MUC assigned to us, or null if none is known.
public string? get_own_occupant_id(Account account, Jid muc_jid) {
    if (!own_occupant_ids.has_key(account)) return null;
    if (!own_occupant_ids[account].has_key(muc_jid)) return null;
    return own_occupant_ids[account][muc_jid];
}
private void on_account_added(Account account) {
stream_interactor.module_manager.get_module(account, Xep.Muc.Module.IDENTITY).self_removed_from_room.connect( (stream, jid, code) => {
cancel_sync(account, jid);
left(account, jid);
});
stream_interactor.module_manager.get_module(account, Xep.Muc.Module.IDENTITY).subject_set.connect( (stream, subject, jid) => {
@ -441,12 +392,6 @@ public class MucManager : StreamInteractionModule, Object {
private_room_occupant_updated(account, room, occupant);
}
});
stream_interactor.module_manager.get_module(account, Xep.OccupantIds.Module.IDENTITY).received_own_occupant_id.connect( (stream, jid, occupant_id) => {
if (!own_occupant_ids.has_key(account)) {
own_occupant_ids[account] = new HashMap<Jid, string>(Jid.hash_bare_func, Jid.equals_bare_func);
}
own_occupant_ids[account][jid] = occupant_id;
});
}
private async void search_default_muc_server(Account account) {
@ -480,14 +425,6 @@ public class MucManager : StreamInteractionModule, Object {
}
private async void on_stream_negotiated(Account account, XmppStream stream) {
if (mucs_sync_cancellables.has_key(account)) {
foreach (Cancellable cancellable in mucs_sync_cancellables[account].values) {
if (!cancellable.is_cancelled()) {
cancellable.cancel();
}
}
}
yield initialize_bookmarks_provider(account);
Set<Conference>? conferences = yield bookmarks_provider[account].get_conferences(stream);
@ -631,7 +568,8 @@ public class MucManager : StreamInteractionModule, Object {
} else if (conversation.active && !conference.autojoin) {
part(account, conference.jid);
}
} else if (conference.autojoin) {
}
if (conference.autojoin) {
join.begin(account, conference.jid, conference.nick, conference.password);
}
conference_added(account, conference);
@ -645,19 +583,13 @@ public class MucManager : StreamInteractionModule, Object {
conference_removed(account, jid);
}
// Tags outgoing MUC private messages with the muc#user <x/> marker.
private void on_build_message_stanza(Entities.Message message, Xmpp.MessageStanza message_stanza, Conversation conversation) {
    if (conversation.type_ != Conversation.Type.GROUPCHAT_PM) return;
    Xmpp.Xep.Muc.add_muc_pm_message_stanza_x_node(message_stanza);
}
private void self_ping(Account account) {
XmppStream? stream = stream_interactor.get_stream(account);
if (stream == null) return;
if (!mucs_joined.has_key(account)) return;
if (!mucs_todo.has_key(account)) return;
foreach (Jid jid in mucs_joined[account]) {
foreach (Jid jid in mucs_todo[account]) {
bool joined = false;
@ -666,7 +598,7 @@ public class MucManager : StreamInteractionModule, Object {
});
Timeout.add_seconds(10, () => {
if (joined || !mucs_joined.has_key(account) || stream_interactor.get_stream(account) != stream) return false;
if (joined || !mucs_todo.has_key(account) || stream_interactor.get_stream(account) != stream) return false;
join.begin(account, jid.bare_jid, jid.resourcepart, null, true);
return false;
@ -702,10 +634,6 @@ public class MucManager : StreamInteractionModule, Object {
if (m != null) {
// For own messages from this device (msg is a duplicate)
m.marked = Message.Marked.RECEIVED;
string? server_id = Xep.UniqueStableStanzaIDs.get_stanza_id(stanza, m.counterpart.bare_jid);
if (server_id != null) {
m.server_id = server_id;
}
}
// For own messages from other devices (msg is not a duplicate msg)
message.marked = Message.Marked.RECEIVED;

View File

@ -1,464 +0,0 @@
using Gee;
using Qlite;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
public class Dino.Reactions : StreamInteractionModule, Object {
public static ModuleIdentity<Reactions> IDENTITY = new ModuleIdentity<Reactions>("reactions");
public string id { get { return IDENTITY.id; } }
public signal void reaction_added(Account account, int content_item_id, Jid jid, string reaction);
public signal void reaction_removed(Account account, int content_item_id, Jid jid, string reaction);
private StreamInteractor stream_interactor;
private Database db;
private HashMap<string, Gee.List<ReactionInfo>> reaction_infos = new HashMap<string, Gee.List<ReactionInfo>>();
// Creates the reactions module and registers it with the stream interactor.
public static void start(StreamInteractor stream_interactor, Database database) {
    stream_interactor.add_module(new Reactions(stream_interactor, database));
}
// Wires the module to its collaborators and subscribes to account and
// content-item events.
private Reactions(StreamInteractor stream_interactor, Database database) {
this.stream_interactor = stream_interactor;
this.db = database;
stream_interactor.account_added.connect(on_account_added);
stream_interactor.get_module(ContentItemStore.IDENTITY).new_item.connect(on_new_item);
}
// Adds `reaction` to our own reaction set for `content_item` and sends the
// full updated set to the counterpart (the protocol replaces the whole set,
// it does not append). Emits reaction_added on success.
public void add_reaction(Conversation conversation, ContentItem content_item, string reaction) {
    Gee.List<string> reactions = get_own_reactions(conversation, content_item);
    if (!reactions.contains(reaction)) {
        reactions.add(reaction);
    }
    try {
        send_reactions(conversation, content_item, reactions);
        reaction_added(conversation.account, content_item.id, conversation.account.bare_jid, reaction);
    } catch (IOError e) {
        // Best-effort send: log the failure instead of swallowing it silently.
        warning("Could not send reaction: %s", e.message);
    }
}
// Removes `reaction` from our own reaction set for `content_item` and sends
// the remaining set to the counterpart. Emits reaction_removed on success.
public void remove_reaction(Conversation conversation, ContentItem content_item, string reaction) {
    Gee.List<string> reactions = get_own_reactions(conversation, content_item);
    reactions.remove(reaction);
    try {
        send_reactions(conversation, content_item, reactions);
        reaction_removed(conversation.account, content_item.id, conversation.account.bare_jid, reaction);
    } catch (IOError e) {
        // Best-effort send: log the failure instead of swallowing it silently.
        warning("Could not send reaction removal: %s", e.message);
    }
}
// Returns all reactions on a content item, grouped by emoji, using the
// storage model that matches the conversation kind.
public Gee.List<ReactionUsers> get_item_reactions(Conversation conversation, ContentItem content_item) {
    return conversation.type_ == Conversation.Type.CHAT
            ? get_chat_message_reactions(conversation.account, content_item)
            : get_muc_message_reactions(conversation.account, content_item);
}
// Whether reactions can be used in this conversation. Direct chats always
// support them; MUCs must advertise stable stanza ids and either occupant ids
// or be a private (members-only, non-anonymous) room where real JIDs are known.
public bool conversation_supports_reactions(Conversation conversation) {
    if (conversation.type_ == Conversation.Type.CHAT) {
        return true;
    }
    // The MUC server needs to 1) support stable stanza ids 2) either support occupant ids or be a private room (where we know real jids)
    var entity_info = stream_interactor.get_module(EntityInfo.IDENTITY);
    bool server_supports_sid = (entity_info.has_feature_cached(conversation.account, conversation.counterpart.bare_jid, Xep.UniqueStableStanzaIDs.NS_URI)) ||
            (entity_info.has_feature_cached(conversation.account, conversation.counterpart.bare_jid, Xmpp.MessageArchiveManagement.NS_URI));
    if (!server_supports_sid) return false;
    // Fix: this was declared `bool?` but used directly as a condition, which
    // valac flags; the cached-feature lookup yields a plain bool here.
    bool supports_occupant_ids = entity_info.has_feature_cached(conversation.account, conversation.counterpart, Xep.OccupantIds.NS_URI);
    if (supports_occupant_ids) return true;
    return stream_interactor.get_module(MucManager.IDENTITY).is_private_room(conversation.account, conversation.counterpart);
}
// Sends the complete current reaction set for a content item via XEP-0444.
// Throws IOError.FAILED when the content item has no referenceable message id,
// IOError.NOT_CONNECTED when the account has no active stream.
private void send_reactions(Conversation conversation, ContentItem content_item, Gee.List<string> reactions) throws IOError {
string? message_id = stream_interactor.get_module(ContentItemStore.IDENTITY).get_message_id_for_content_item(conversation, content_item);
if (message_id == null) throw new IOError.FAILED("No message for content_item");
XmppStream? stream = stream_interactor.get_stream(conversation.account);
if (stream == null) throw new IOError.NOT_CONNECTED("No stream");
var reactions_module = stream.get_module(Xmpp.Xep.Reactions.Module.IDENTITY);
if (conversation.type_ == Conversation.Type.GROUPCHAT) {
reactions_module.send_reaction.begin(stream, conversation.counterpart, "groupchat", message_id, reactions);
// We save the reaction when it gets reflected back to us
} else if (conversation.type_ == Conversation.Type.GROUPCHAT_PM) {
reactions_module.send_reaction.begin(stream, conversation.counterpart, "chat", message_id, reactions);
} else if (conversation.type_ == Conversation.Type.CHAT) {
// Capture the timestamp before the async send so the stored time reflects when the user reacted.
int64 now_millis = GLib.get_real_time () / 1000;
reactions_module.send_reaction.begin(stream, conversation.counterpart, "chat", message_id, reactions, (_, res) => {
try {
reactions_module.send_reaction.end(res);
// Persist locally only after the send completed without error.
save_chat_reactions(conversation.account, conversation.account.bare_jid, content_item.id, now_millis, reactions);
} catch (IOError e) {}
});
}
}
// Returns the emojis we currently have on the given content item.
// CHAT: looked up by our bare JID. GROUPCHAT: looked up by our own occupant id.
// Any other conversation type (e.g. GROUPCHAT_PM) yields an empty list.
private Gee.List<string> get_own_reactions(Conversation conversation, ContentItem content_item) {
if (conversation.type_ == Conversation.Type.CHAT) {
return get_chat_user_reactions(conversation.account, content_item.id, conversation.account.bare_jid)
.emojis;
} else if (conversation.type_ == Conversation.Type.GROUPCHAT) {
string own_occupant_id = stream_interactor.get_module(MucManager.IDENTITY).get_own_occupant_id(conversation.account, content_item.jid);
return get_muc_user_reactions(conversation.account, content_item.id, own_occupant_id, conversation.account.bare_jid)
.emojis;
}
return new ArrayList<string>();
}
// Value holder pairing one user's reaction emojis with the time they were set
// (milliseconds since epoch; empty list and -1 when no reaction row exists).
private class ReactionsTime {
public Gee.List<string>? emojis = null;
public int64 time = -1;
}
// Loads the reaction row for (account, content item, bare JID) in a 1:1 chat.
// Returns an empty ReactionsTime (empty list, time -1) when no row exists.
private ReactionsTime get_chat_user_reactions(Account account, int content_item_id, Jid jid) {
int jid_id = db.get_jid_id(jid);
QueryBuilder query = db.reaction.select()
.with(db.reaction.account_id, "=", account.id)
.with(db.reaction.content_item_id, "=", content_item_id)
.with(db.reaction.jid_id, "=", jid_id);
RowOption row = query.single().row();
ReactionsTime ret = new ReactionsTime();
if (row.is_present()) {
ret.emojis = string_to_emoji_list(row[db.reaction.emojis]);
ret.time = row[db.reaction.time];
} else {
ret.emojis = new ArrayList<string>();
ret.time = -1;
}
return ret;
}
// Loads a single user's reaction row on a MUC message, preferring lookup by
// occupant id (XEP-0421) and falling back to the real JID.
// With neither identifier the query is not narrowed; a critical is logged.
private ReactionsTime get_muc_user_reactions(Account account, int content_item_id, string? occupant_id, Jid? real_jid) {
if (occupant_id == null && real_jid == null) critical("Need occupant id or real jid of a reaction");
QueryBuilder query = db.reaction.select()
.with(db.reaction.account_id, "=", account.id)
.with(db.reaction.content_item_id, "=", content_item_id)
.outer_join_with(db.occupantid, db.occupantid.id, db.reaction.occupant_id);
if (occupant_id != null) {
query.with(db.occupantid.occupant_id, "=", occupant_id);
} else if (real_jid != null) {
query.with(db.reaction.jid_id, "=", db.get_jid_id(real_jid));
}
RowOption row = query.single().row();
ReactionsTime ret = new ReactionsTime();
if (row.is_present()) {
ret.emojis = string_to_emoji_list(row[db.reaction.emojis]);
ret.time = row[db.reaction.time];
} else {
ret.emojis = new ArrayList<string>();
ret.time = -1;
}
return ret;
}
private Gee.List<string> string_to_emoji_list(string emoji_str) {
    // Split the comma-separated DB column into individual emojis, skipping empty segments.
    var emojis = new ArrayList<string>();
    foreach (string part in emoji_str.split(",")) {
        if (part.length > 0) emojis.add(part);
    }
    return emojis;
}
// All reactions on a 1:1 chat message, grouped per emoji with the bare JIDs of
// everyone who used it. Rows are visited newest-first, which fixes the order
// in which emojis first appear in the returned list.
public Gee.List<ReactionUsers> get_chat_message_reactions(Account account, ContentItem content_item) {
QueryBuilder select = db.reaction.select()
.with(db.reaction.account_id, "=", account.id)
.with(db.reaction.content_item_id, "=", content_item.id)
.order_by(db.reaction.time, "DESC");
var ret = new ArrayList<ReactionUsers>();
// index: emoji -> its ReactionUsers entry in ret
var index = new HashMap<string, ReactionUsers>();
foreach (Row row in select) {
string emoji_str = row[db.reaction.emojis];
Jid jid = db.get_jid_by_id(row[db.reaction.jid_id]);
foreach (string emoji in emoji_str.split(",")) {
if (!index.has_key(emoji)) {
index[emoji] = new ReactionUsers() { reaction=emoji, jids=new ArrayList<Jid>(Jid.equals_func) };
ret.add(index[emoji]);
}
index[emoji].jids.add(jid);
}
}
return ret;
}
// All reactions on a MUC message, grouped per emoji. Each reactor is mapped to
// a JID: their real bare JID if known, our account JID for our own occupant id,
// otherwise a MUC JID built from the occupant's last known nick.
public Gee.List<ReactionUsers> get_muc_message_reactions(Account account, ContentItem content_item) {
QueryBuilder select = db.reaction.select()
.with(db.reaction.account_id, "=", account.id)
.with(db.reaction.content_item_id, "=", content_item.id)
.outer_join_with(db.occupantid, db.occupantid.id, db.reaction.occupant_id)
.outer_join_with(db.jid, db.jid.id, db.reaction.jid_id)
.order_by(db.reaction.time, "DESC");
string? own_occupant_id = stream_interactor.get_module(MucManager.IDENTITY).get_own_occupant_id(account, content_item.jid);
var ret = new ArrayList<ReactionUsers>();
var index = new HashMap<string, ReactionUsers>();
foreach (Row row in select) {
string emoji_str = row[db.reaction.emojis];
Jid jid = null;
if (!db.jid.bare_jid.is_null(row)) {
jid = new Jid(row[db.jid.bare_jid]);
} else if (!db.occupantid.occupant_id.is_null(row)) {
if (row[db.occupantid.occupant_id] == own_occupant_id) {
jid = account.bare_jid;
} else {
string nick = row[db.occupantid.last_nick];
jid = content_item.jid.with_resource(nick);
}
} else {
warning("Reaction with neither JID nor occupant id");
}
// NOTE(review): in the warning case jid stays null but is still added to the
// per-emoji list below — confirm consumers tolerate null entries.
foreach (string emoji in emoji_str.split(",")) {
if (!index.has_key(emoji)) {
index[emoji] = new ReactionUsers() { reaction=emoji, jids=new ArrayList<Jid>(Jid.equals_func) };
ret.add(index[emoji]);
}
index[emoji].jids.add(jid);
}
}
return ret;
}
// Wires up reaction handling for a newly added account: forwards incoming
// XEP-0444 reactions to the async processing pipeline.
private void on_account_added(Account account) {
// TODO get time from delays
stream_interactor.module_manager.get_module(account, Xmpp.Xep.Reactions.Module.IDENTITY).received_reactions.connect((stream, from_jid, message_id, reactions, stanza) => {
on_reaction_received.begin(account, from_jid, message_id, reactions, stanza);
});
}
// Handles an incoming reaction stanza. If the referenced message is already
// known the reaction is processed immediately; otherwise it is cached in
// reaction_infos until the message arrives (see on_new_item).
private async void on_reaction_received(Account account, Jid from_jid, string message_id, Gee.List<string> reactions, MessageStanza stanza) {
if (stanza.type_ == MessageStanza.TYPE_GROUPCHAT) {
// Apply the same restrictions for incoming reactions as we do on sending them
// NOTE(review): approx_conversation_for_stanza's nullability is not visible here —
// confirm it cannot return null before being dereferenced below.
Conversation muc_conversation = stream_interactor.get_module(ConversationManager.IDENTITY).approx_conversation_for_stanza(from_jid, account.bare_jid, account, MessageStanza.TYPE_GROUPCHAT);
bool muc_supports_reactions = conversation_supports_reactions(muc_conversation);
if (!muc_supports_reactions) return;
}
Message reaction_message = yield stream_interactor.get_module(MessageProcessor.IDENTITY).parse_message_stanza(account, stanza);
Conversation conversation = stream_interactor.get_module(ConversationManager.IDENTITY).get_conversation_for_message(reaction_message);
int content_item_id = stream_interactor.get_module(ContentItemStore.IDENTITY).get_content_item_id_for_message_id(conversation, message_id);
var reaction_info = new ReactionInfo() { conversation=conversation, from_jid=from_jid, reactions=reactions, stanza=stanza, received_time=new DateTime.now() };
if (content_item_id != -1) {
process_reaction_for_message(content_item_id, reaction_info);
return;
}
// Store reaction infos for later processing after we got the message
debug("Got reaction for %s but dont have message yet %s", message_id, db.get_jid_id(stanza.from.bare_jid).to_string());
if (!reaction_infos.has_key(message_id)) {
reaction_infos[message_id] = new ArrayList<ReactionInfo>();
}
reaction_infos[message_id].add(reaction_info);
}
/*
* When we get a new ContentItem, check if we have any reactions cached that apply to it.
* If so, process the reactions, map and store them.
*/
private void on_new_item(ContentItem item, Conversation conversation) {
// The pending-reaction cache is keyed by the stanza id reactions referenced.
string? stanza_id = stream_interactor.get_module(ContentItemStore.IDENTITY).get_message_id_for_content_item(conversation, item);
if (stanza_id == null) return;
Gee.List<ReactionInfo>? reaction_info_list = reaction_infos[stanza_id];
if (reaction_info_list == null) return;
Message? message = stream_interactor.get_module(ContentItemStore.IDENTITY).get_message_for_content_item(conversation, item);
if (message == null) return;
// Check if the (or potentially which) reaction fits the message
var applicable_reactions = new ArrayList<ReactionInfo>();
applicable_reactions.add_all_iterator(reaction_info_list.filter(info => info.conversation.equals(conversation)));
foreach (ReactionInfo applicable_reaction in applicable_reactions) {
reaction_info_list.remove(applicable_reaction);
// Drop the cache entry once the last pending reaction for this stanza id is taken out.
if (reaction_info_list.is_empty) {
reaction_infos.unset(stanza_id);
}
debug("Got ContentItem for reaction %s", stanza_id);
process_reaction_for_message(item.id, applicable_reaction);
}
}
private Message? get_message_for_reaction(Conversation conversation, string message_id) {
    // Query message from a specific account and counterpart. This also makes sure
    // it's a valid reaction for the message. 1:1 chats address messages by stanza
    // id, MUCs by the server-assigned stable id.
    var storage = stream_interactor.get_module(MessageStorage.IDENTITY);
    return conversation.type_ == Conversation.Type.CHAT
            ? storage.get_message_by_stanza_id(message_id, conversation)
            : storage.get_message_by_server_id(message_id, conversation);
}
// Maps a received reaction onto a stored content item:
//  1. derive the reaction timestamp (delayed delivery > MAM server time > now,
//     clamped to the local receive time if it claims to be from the future),
//  2. drop it if we already store a newer reaction from the same sender,
//  3. persist it and emit reaction_added/reaction_removed as the diff against
//     the sender's previous reaction set.
private void process_reaction_for_message(int content_item_id, ReactionInfo reaction_info) {
Account account = reaction_info.conversation.account;
MessageStanza stanza = reaction_info.stanza;
Jid from_jid = reaction_info.from_jid;
Gee.List<string> reactions = reaction_info.reactions;
// Get reaction time
DateTime? reaction_time = null;
DelayedDelivery.MessageFlag? delayed_message_flag = DelayedDelivery.MessageFlag.get_flag(stanza);
if (delayed_message_flag != null) {
reaction_time = delayed_message_flag.datetime;
}
if (reaction_time == null) {
MessageArchiveManagement.MessageFlag? mam_message_flag = MessageArchiveManagement.MessageFlag.get_flag(stanza);
if (mam_message_flag != null) reaction_time = mam_message_flag.server_time;
}
var time_now = new DateTime.now_local();
if (reaction_time == null) reaction_time = time_now;
// Don't trust timestamps from the future; fall back to when we received the stanza.
if (reaction_time.compare(time_now) > 0) {
reaction_time = reaction_info.received_time;
}
int64 reaction_time_long = (int64) (reaction_time.to_unix() * 1000 + reaction_time.get_microsecond() / 1000);
// Get current reactions
string? occupant_id = OccupantIds.get_occupant_id(stanza.stanza);
Jid? real_jid = stream_interactor.get_module(MucManager.IDENTITY).get_real_jid(from_jid, account);
if (stanza.type_ == MessageStanza.TYPE_GROUPCHAT && occupant_id == null && real_jid == null) {
warning("Attempting to add reaction to message w/o knowing occupant id or real jid");
return;
}
ReactionsTime reactions_time = null;
if (stanza.type_ == MessageStanza.TYPE_GROUPCHAT) {
reactions_time = get_muc_user_reactions(account, content_item_id, occupant_id, real_jid);
} else {
reactions_time = get_chat_user_reactions(account, content_item_id, from_jid);
}
if (reaction_time_long <= reactions_time.time) {
// We already have a more recent reaction
return;
}
// Save reactions
if (stanza.type_ == MessageStanza.TYPE_GROUPCHAT) {
save_muc_reactions(account, content_item_id, from_jid, occupant_id, real_jid, reaction_time_long, reactions);
} else {
save_chat_reactions(account, from_jid, content_item_id, reaction_time_long, reactions);
}
// Notify about reaction changes
Gee.List<string>? current_reactions = reactions_time.emojis;
// Attribute our own MUC occupant to our account JID; reduce 1:1 senders to the bare JID.
Jid signal_jid = from_jid;
if (stanza.type_ == MessageStanza.TYPE_GROUPCHAT &&
signal_jid.equals(stream_interactor.get_module(MucManager.IDENTITY).get_own_jid(from_jid, account))) {
signal_jid = account.bare_jid;
} else if (stanza.type_ == MessageStanza.TYPE_CHAT) {
signal_jid = signal_jid.bare_jid;
}
foreach (string current_reaction in current_reactions) {
if (!reactions.contains(current_reaction)) {
reaction_removed(account, content_item_id, signal_jid, current_reaction);
}
}
foreach (string new_reaction in reactions) {
if (!current_reactions.contains(new_reaction)) {
reaction_added(account, content_item_id, signal_jid, new_reaction);
}
}
debug("reactions were: ");
foreach (string reac in current_reactions) {
debug(reac);
}
debug("reactions new : ");
foreach (string reac in reactions) {
debug(reac);
}
}
private void save_chat_reactions(Account account, Jid jid, int content_item_id, int64 reaction_time, Gee.List<string> reactions) {
    // Persist the full reaction set as one comma-separated string, keyed by
    // (account, content item, jid).
    string emojis = string.joinv(",", reactions.to_array());
    db.reaction.upsert()
            .value(db.reaction.account_id, account.id, true)
            .value(db.reaction.content_item_id, content_item_id, true)
            .value(db.reaction.jid_id, db.get_jid_id(jid), true)
            .value(db.reaction.emojis, emojis, false)
            .value(db.reaction.time, (long)reaction_time, false)
            .perform();
}
// Persists a MUC user's reaction set. Requires an occupant id (XEP-0421)
// and/or the user's real JID; the occupant id row is created on demand and
// referenced from the reaction row by its database id.
private void save_muc_reactions(Account account, int content_item_id, Jid jid, string? occupant_id, Jid? real_jid, int64 reaction_time, Gee.List<string> reactions) {
assert(occupant_id != null || real_jid != null);
int jid_id = db.get_jid_id(jid);
var emoji_builder = new StringBuilder();
for (int i = 0; i < reactions.size; i++) {
if (i != 0) emoji_builder.append(",");
emoji_builder.append(reactions[i]);
}
var builder = db.reaction.upsert()
.value(db.reaction.account_id, account.id, true)
.value(db.reaction.content_item_id, content_item_id, true)
.value(db.reaction.emojis, emoji_builder.str, false)
.value(db.reaction.time, (long)reaction_time, false);
if (real_jid != null) {
// The real JID is only part of the upsert key when no occupant id exists.
builder.value(db.reaction.jid_id, db.get_jid_id(real_jid), occupant_id == null);
}
if (occupant_id != null) {
RowOption row = db.occupantid.select()
.with(db.occupantid.account_id, "=", account.id)
.with(db.occupantid.jid_id, "=", jid_id)
.with(db.occupantid.occupant_id, "=", occupant_id)
.single().row();
int occupant_db_id = -1;
if (row.is_present()) {
occupant_db_id = row[db.occupantid.id];
} else {
occupant_db_id = (int)db.occupantid.upsert()
.value(db.occupantid.account_id, account.id, true)
.value(db.occupantid.jid_id, jid_id, true)
.value(db.occupantid.occupant_id, occupant_id, true)
.value(db.occupantid.last_nick, jid.resourcepart, false)
.perform();
}
builder.value(db.reaction.occupant_id, occupant_db_id, true);
}
builder.perform();
}
}
// One emoji together with all JIDs that reacted with it.
public class Dino.ReactionUsers {
public string reaction { get; set; }
public Gee.List<Jid> jids { get; set; }
}
// A received reaction stanza plus context, cached until the message it targets
// has arrived and been stored.
public class Dino.ReactionInfo {
public Conversation conversation { get; set; }
public Jid from_jid { get; set; }
public Gee.List<string> reactions { get; set; }
public MessageStanza stanza { get; set; }
// Local time when the stanza arrived; used when the claimed timestamp is implausible.
public DateTime received_time { get; set; }
}

View File

@ -71,12 +71,6 @@ public class Register : StreamInteractionModule, Object{
return ret;
}
// Changes the account password via in-band registration (XEP-0077).
// Returns null when there is no active stream; otherwise the response's
// error condition string.
// NOTE(review): presumably the condition is null on success — confirm.
public async string? change_password(Account account, string new_pw){
XmppStream stream = stream_interactor.get_stream(account);
if (stream == null) return null;
return (yield stream.get_module(Xep.InBandRegistration.Module.IDENTITY).change_password(stream, account.full_jid, new_pw)).condition;
}
public class ServerAvailabilityReturn {
public bool available { get; set; }
public TlsCertificateFlags? error_flags { get; set; }
@ -141,7 +135,6 @@ public class Register : StreamInteractionModule, Object{
Gee.List<XmppStreamModule> list = new ArrayList<XmppStreamModule>();
list.add(new Iq.Module());
list.add(new Xep.InBandRegistration.Module());
list.add(new Xep.BitsOfBinary.Module());
XmppStreamResult stream_result = yield Xmpp.establish_stream(jid.domain_jid, list, Application.print_xmpp,
(peer_cert, errors) => { return ConnectionManager.on_invalid_certificate(jid.domainpart, peer_cert, errors); }

View File

@ -1,103 +0,0 @@
using Gee;
using Qlite;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
// Tracks XEP-0461 message replies: resolves which content item a reply quotes,
// including replies that arrived before the message they reference.
public class Dino.Replies : StreamInteractionModule, Object {
public static ModuleIdentity<Replies> IDENTITY = new ModuleIdentity<Replies>("reply");
public string id { get { return IDENTITY.id; } }
private StreamInteractor stream_interactor;
private Database db;
// Replies whose quoted message has not been seen yet, keyed by conversation and quoted id.
// NOTE(review): this map is never written in the visible code — confirm it is still used.
private HashMap<Conversation, HashMap<string, Gee.List<Message>>> unmapped_replies = new HashMap<Conversation, HashMap<string, Gee.List<Message>>>();
private ReceivedMessageListener received_message_listener;
public static void start(StreamInteractor stream_interactor, Database db) {
Replies m = new Replies(stream_interactor, db);
stream_interactor.add_module(m);
}
private Replies(StreamInteractor stream_interactor, Database db) {
this.stream_interactor = stream_interactor;
this.db = db;
this.received_message_listener = new ReceivedMessageListener(stream_interactor, this);
stream_interactor.get_module(MessageProcessor.IDENTITY).received_pipeline.connect(received_message_listener);
}
// Returns the content item a message quotes, or null when it quotes nothing
// or the quoted item cannot be resolved from the reply table.
public ContentItem? get_quoted_content_item(Message message, Conversation conversation) {
if (message.quoted_item_id == 0) return null;
RowOption row_option = db.reply.select().with(db.reply.message_id, "=", message.id).row();
if (row_option.is_present()) {
return stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_id(conversation, row_option[db.reply.quoted_content_item_id]);
}
return null;
}
// Called for every incoming message: links previously stored replies that
// quoted this message, then resolves this message's own quote if it is a reply.
private void on_incoming_message(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
// Check if a previous message was in reply to this one
var reply_qry = db.reply.select();
if (conversation.type_ == Conversation.Type.GROUPCHAT) {
// MUC replies reference the server-assigned stable id, 1:1 replies the stanza id.
reply_qry.with(db.reply.quoted_message_stanza_id, "=", message.server_id);
} else {
reply_qry.with(db.reply.quoted_message_stanza_id, "=", message.stanza_id);
}
reply_qry.join_with(db.message, db.reply.message_id, db.message.id)
.with(db.message.account_id, "=", conversation.account.id)
.with(db.message.counterpart_id, "=", db.get_jid_id(conversation.counterpart))
.with(db.message.time, ">", (long)message.time.to_unix())
.order_by(db.message.time, "DESC");
foreach (Row reply_row in reply_qry) {
// NOTE(review): the literal 1 is presumably the message foreign-id type — confirm.
ContentItem? message_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_item_by_foreign(conversation, 1, message.id);
Message? reply_message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_id(reply_row[db.message.id], conversation);
if (message_item != null && reply_message != null) {
reply_message.set_quoted_item(message_item.id);
}
}
// Handle if this message is a reply
Xep.Replies.ReplyTo? reply_to = Xep.Replies.get_reply_to(stanza);
if (reply_to == null) return;
ContentItem? quoted_content_item = stream_interactor.get_module(ContentItemStore.IDENTITY).get_content_item_for_message_id(conversation, reply_to.to_message_id);
if (quoted_content_item == null) return;
message.set_quoted_item(quoted_content_item.id);
}
// Pipeline hook running after the message and its content item are stored.
private class ReceivedMessageListener : MessageListener {
public string[] after_actions_const = new string[]{ "STORE", "STORE_CONTENT_ITEM" };
public override string action_group { get { return "Quote"; } }
public override string[] after_actions { get { return after_actions_const; } }
private Replies outer;
public ReceivedMessageListener(StreamInteractor stream_interactor, Replies outer) {
this.outer = outer;
}
public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
outer.on_incoming_message(message, stanza, conversation);
return false;
}
}
}
namespace Dino {
// Returns the message body with the XEP-0461 reply-fallback region removed,
// provided the message has a resolved quoted item.
public string message_body_without_reply_fallback(Message message) {
string body = message.body;
foreach (var fallback in message.get_fallbacks()) {
if (fallback.ns_uri == Xep.Replies.NS_URI && message.quoted_item_id > 0) {
// Cut the fallback's character range [from_char, to_char) out of the body.
body = body[0:body.index_of_nth_char(fallback.locations[0].from_char)] +
body[body.index_of_nth_char(fallback.locations[0].to_char):body.length];
}
}
return body;
}
}

View File

@ -133,7 +133,6 @@ public class RosterStoreImpl : Roster.Storage, Object {
.value(db.roster.jid, item.jid.to_string(), true)
.value(db.roster.handle, item.name)
.value(db.roster.subscription, item.subscription)
.value(db.roster.ask, item.ask)
.perform();
}

View File

@ -1,80 +0,0 @@
using Gdk;
using GLib;
using Gee;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
namespace Dino {
// Extracts XEP-0446 file metadata for files about to be shared.
public interface FileMetadataProvider : Object {
// Whether this provider can produce metadata for the given file.
public abstract bool supports_file(File file);
// Fills `metadata` with information about `file`.
public abstract async void fill_metadata(File file, Xep.FileMetadataElement.FileMetadata metadata);
}
// Fallback provider for any file: basic file info plus SHA-256/SHA-512 hashes.
class GenericFileMetadataProvider: Dino.FileMetadataProvider, Object {
public bool supports_file(File file) {
return true;
}
public async void fill_metadata(File file, Xep.FileMetadataElement.FileMetadata metadata) {
// NOTE(review): query_info can throw but this method declares no error — confirm handling.
FileInfo info = file.query_info("*", FileQueryInfoFlags.NONE);
metadata.name = info.get_display_name();
metadata.mime_type = info.get_content_type();
metadata.size = info.get_size();
metadata.date = info.get_modification_date_time();
// Hash the file once for both digest algorithms.
var checksum_types = new ArrayList<ChecksumType>.wrap(new ChecksumType[] { ChecksumType.SHA256, ChecksumType.SHA512 });
var file_hashes = yield compute_file_hashes(file, checksum_types);
metadata.hashes.add(new CryptographicHashes.Hash.with_checksum(ChecksumType.SHA256, file_hashes[ChecksumType.SHA256]));
metadata.hashes.add(new CryptographicHashes.Hash.with_checksum(ChecksumType.SHA512, file_hashes[ChecksumType.SHA512]));
}
}
// Image provider: adds pixel dimensions and a tiny PNG preview thumbnail.
public class ImageFileMetadataProvider: Dino.FileMetadataProvider, Object {
public bool supports_file(File file) {
string mime_type = file.query_info("*", FileQueryInfoFlags.NONE).get_content_type();
return Dino.Util.is_pixbuf_supported_mime_type(mime_type);
}
// Candidate thumbnail edge lengths; paired with the mirrored entry below to
// enumerate aspect ratios from 1x8 up to 8x1.
private const int[] THUMBNAIL_DIMS = { 1, 2, 3, 4, 8 };
private const string IMAGE_TYPE = "png";
private const string MIME_TYPE = "image/png";
public async void fill_metadata(File file, Xep.FileMetadataElement.FileMetadata metadata) {
// NOTE(review): Pixbuf.from_stream can throw but this method declares no error — confirm handling.
Pixbuf pixbuf = new Pixbuf.from_stream(yield file.read_async());
metadata.width = pixbuf.get_width();
metadata.height = pixbuf.get_height();
float ratio = (float)metadata.width / (float) metadata.height;
// Pick the candidate width/height pair whose aspect ratio is closest to the image's.
int thumbnail_width = -1;
int thumbnail_height = -1;
float diff = float.INFINITY;
for (int i = 0; i < THUMBNAIL_DIMS.length; i++) {
int test_width = THUMBNAIL_DIMS[i];
int test_height = THUMBNAIL_DIMS[THUMBNAIL_DIMS.length - 1 - i];
float test_ratio = (float)test_width / (float)test_height;
float test_diff = (test_ratio - ratio).abs();
if (test_diff < diff) {
thumbnail_width = test_width;
thumbnail_height = test_height;
diff = test_diff;
}
}
Pixbuf thumbnail_pixbuf = pixbuf.scale_simple(thumbnail_width, thumbnail_height, InterpType.BILINEAR);
uint8[] buffer;
thumbnail_pixbuf.save_to_buffer(out buffer, IMAGE_TYPE);
var thumbnail = new Xep.JingleContentThumbnails.Thumbnail();
thumbnail.data = new Bytes.take(buffer);
thumbnail.media_type = MIME_TYPE;
thumbnail.width = thumbnail_width;
thumbnail.height = thumbnail_height;
metadata.thumbnails.add(thumbnail);
}
}
}

View File

@ -1,168 +0,0 @@
using Gdk;
using Gee;
using Xmpp;
using Xmpp.Xep;
using Dino.Entities;
// Handles XEP-0447 Stateless File Sharing: turns incoming <file-sharing/>
// elements into FileTransfers and attaches later-announced sources.
public class Dino.StatelessFileSharing : StreamInteractionModule, Object {
    public static ModuleIdentity<StatelessFileSharing> IDENTITY = new ModuleIdentity<StatelessFileSharing>("sfs");
    public string id { get { return IDENTITY.id; } }

    // FileTransfer.provider value marking transfers created from XEP-0447 elements.
    public const int SFS_PROVIDER_ID = 2;

    public StreamInteractor stream_interactor {
        owned get { return Application.get_default().stream_interactor; }
        private set { }
    }
    public FileManager file_manager {
        owned get { return stream_interactor.get_module(FileManager.IDENTITY); }
        private set { }
    }
    public Database db {
        owned get { return Application.get_default().db; }
        private set { }
    }

    private StatelessFileSharing(StreamInteractor stream_interactor, Database db) {
        this.stream_interactor = stream_interactor;
        this.db = db;
        stream_interactor.get_module(MessageProcessor.IDENTITY).received_pipeline.connect(new ReceivedMessageListener(this));
    }

    public static void start(StreamInteractor stream_interactor, Database db) {
        StatelessFileSharing m = new StatelessFileSharing(stream_interactor, db);
        stream_interactor.add_module(m);
    }

    // Creates and stores a FileTransfer for a received file share, copying
    // addressing/timing from the carrying message, then hands it to the FileManager.
    public async void create_file_transfer(Conversation conversation, Message message, string? file_sharing_id, Xep.FileMetadataElement.FileMetadata metadata, Gee.List<Xep.StatelessFileSharing.Source>? sources) {
        FileTransfer file_transfer = new FileTransfer();
        file_transfer.file_sharing_id = file_sharing_id;
        file_transfer.account = message.account;
        file_transfer.counterpart = message.counterpart;
        file_transfer.ourpart = message.ourpart;
        file_transfer.direction = message.direction;
        file_transfer.time = message.time;
        file_transfer.local_time = message.local_time;
        file_transfer.provider = SFS_PROVIDER_ID;
        file_transfer.file_metadata = metadata;
        // Back-reference to the message that carried the file share.
        file_transfer.info = message.id.to_string();
        if (sources != null) {
            file_transfer.sfs_sources = sources;
        }
        stream_interactor.get_module(FileTransferStorage.IDENTITY).add_file(file_transfer);
        conversation.last_active = file_transfer.time;
        file_manager.received_file(file_transfer, conversation);
    }

    // Attaches additionally announced sources to an existing file transfer and
    // may auto-start a download for small files from trustworthy senders.
    public void on_received_sources(Jid from, Conversation conversation, string attach_to_message_id, string? attach_to_file_id, Gee.List<Xep.StatelessFileSharing.Source> sources) {
        Message? message = stream_interactor.get_module(MessageStorage.IDENTITY).get_message_by_referencing_id(attach_to_message_id, conversation);
        if (message == null) return;

        FileTransfer? file_transfer = null;
        if (attach_to_file_id != null) {
            file_transfer = stream_interactor.get_module(FileTransferStorage.IDENTITY).get_files_by_message_and_file_id(message.id, attach_to_file_id, conversation);
        } else {
            file_transfer = stream_interactor.get_module(FileTransferStorage.IDENTITY).get_file_by_message_id(message.id, conversation);
        }
        if (file_transfer == null) return;

        // "If no <hash/> is provided or the <hash/> elements provided use unsupported algorithms, receiving clients MUST ignore
        // any attached sources from other senders and only obtain the file from the sources announced by the original sender."
        // For now we only allow the original sender
        // FIX: the guard was inverted — it previously only rejected sources that
        // came *from* the original sender *and* lacked supported hashes, letting
        // all other senders through. Reject unless both conditions hold.
        if (!from.equals(file_transfer.from) || Xep.CryptographicHashes.get_supported_hashes(file_transfer.hashes).is_empty) {
            warning("Ignoring sfs source: Not from original sender or no known file hashes");
            return;
        }

        foreach (var source in sources) {
            file_transfer.add_sfs_source(source);
        }

        // Auto-download small (< 5 MB) files from trustworthy senders that were not started yet.
        if (file_manager.is_sender_trustworthy(file_transfer, conversation) && file_transfer.state == FileTransfer.State.NOT_STARTED && file_transfer.size >= 0 && file_transfer.size < 5000000) {
            file_manager.download_file(file_transfer);
        }
    }

    // Pipeline hook (after STORE) that interprets XEP-0447 elements on incoming messages.
    private class ReceivedMessageListener : MessageListener {
        public string[] after_actions_const = new string[]{ "STORE" };
        public override string action_group { get { return "MESSAGE_REINTERPRETING"; } }
        public override string[] after_actions { get { return after_actions_const; } }

        private StatelessFileSharing outer;
        private StreamInteractor stream_interactor;

        public ReceivedMessageListener(StatelessFileSharing outer) {
            this.outer = outer;
            this.stream_interactor = outer.stream_interactor;
        }

        // Returns true when the message was fully handled as an SFS element.
        public override async bool run(Entities.Message message, Xmpp.MessageStanza stanza, Conversation conversation) {
            Gee.List<Xep.StatelessFileSharing.FileShare> file_shares = Xep.StatelessFileSharing.get_file_shares(stanza);
            if (file_shares != null) {
                // For now, only accept file shares that have at least one supported hash
                foreach (Xep.StatelessFileSharing.FileShare file_share in file_shares) {
                    if (!Xep.CryptographicHashes.has_supported_hashes(file_share.metadata.hashes)) {
                        return false;
                    }
                }
                foreach (Xep.StatelessFileSharing.FileShare file_share in file_shares) {
                    outer.create_file_transfer(conversation, message, file_share.id, file_share.metadata, file_share.sources);
                }
                return true;
            }

            var source_attachments = Xep.StatelessFileSharing.get_source_attachments(stanza);
            if (source_attachments != null) {
                // FIX: `return true` used to sit inside this loop, so only the
                // first source attachment of a stanza was ever processed.
                foreach (var source_attachment in source_attachments) {
                    outer.on_received_sources(stanza.from, conversation, source_attachment.to_message_id, source_attachment.to_file_transfer_id, source_attachment.sources);
                }
                return true;
            }

            // Don't process messages that are fallback for legacy clients
            if (Xep.StatelessFileSharing.is_sfs_fallback_message(stanza)) {
                return true;
            }
            return false;
        }
    }
}

View File

@ -89,12 +89,7 @@ public class ModuleIdentity<T> : Object {
}
// Casts a generic StreamInteractionModule to T; normally returns null when the
// module is not a T.
public T? cast(StreamInteractionModule module) {
#if VALA_0_56_11
// We can't typecheck due to compiler bug
// (so on this compiler version an incompatible module is returned unchecked).
return (T) module;
#else
return module.get_type().is_a(typeof(T)) ? (T?) module : null;
#endif
}
public bool matches(StreamInteractionModule module) {

Some files were not shown because too many files have changed in this diff Show More