Compare commits

...

8 Commits

Author SHA1 Message Date
Marvin W
baf96d9d9f
Check sender of bookmarks:1 updates 2023-03-23 12:06:36 -06:00
Marvin W
179c766d19
Bind soup session lifetime to File provider/sender lifetime
Required since libsoup 3.4. Fixes #1395
2023-03-22 13:22:23 -06:00
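As a minimal sketch of the pattern behind this commit (illustrative class and names, not Dino's actual code), the Soup.Session becomes a member that is created once with its owner and reused for every request, so its lifetime matches the provider/sender rather than an individual transfer:

```vala
// Hypothetical sketch, assuming libsoup 3: one long-lived session per owning
// object instead of a throwaway session per request.
public class HttpFetcher : Object {
    private Soup.Session session;

    public HttpFetcher() {
        // Created once together with the owner.
        session = new Soup.Session();
        session.user_agent = "ExampleFetcher/0.1 ";
    }

    public async InputStream fetch(string url, Cancellable? cancellable = null) throws Error {
        var message = new Soup.Message("GET", url);
        // The session outlives every request it serves.
        return yield session.send_async(message, GLib.Priority.DEFAULT, cancellable);
    }
}
```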
Bohdan Horbeshko
004824040d
Fix a crash if a message subnode is not found in a carbon
Fixes #1392
2023-03-22 09:59:55 -06:00
Michael Vetter
b6f9b54d76
Remove gspell
Commit 7e7dcedaf ported Dino from GTK3 to GTK4 and also removed gspell from main/CMakeLists.txt.

I assume gspell is no longer needed, so we can remove the requirement from the CI and the CMake file as well.
2023-03-22 09:59:55 -06:00
Sebastian Krzyszkowiak
1738bf8dc8
data: Set StartupNotify to true in .desktop file
GTK handles startup notifications, so advertise this in the desktop file.
This allows splash screens and other startup indications in desktop environments to work.
2023-03-22 09:59:54 -06:00
Marvin W
481a68fd89
Improve database performance while reconnecting and syncing
Also move some tasks to a low-priority idle queue so they don't block UI updates
2023-03-22 09:59:54 -06:00
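The low-priority idle queue mentioned above is plain GLib machinery; a minimal sketch of the debouncing pattern (illustrative names, not the actual Dino classes):

```vala
// Hypothetical sketch: coalesce repeated refresh requests into a single
// callback that runs at Priority.LOW, so it never preempts normal-priority
// UI work on the main loop.
public class UnreadBadge : Object {
    private bool refresh_pending = false;

    public void request_refresh() {
        if (refresh_pending) return;   // a refresh is already scheduled
        refresh_pending = true;
        Idle.add(() => {
            refresh_pending = false;
            do_refresh();              // runs once the main loop is otherwise idle
            return Source.REMOVE;
        }, Priority.LOW);
    }

    private void do_refresh() {
        // Recompute and render the unread counter here.
    }
}
```

The same idea appears twice in this diff: the conversation row batches its unread-count updates this way, and the MAM module removes finished query ids in a low-priority idle callback.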
Marvin W
89b9110fcb
Improve history sync
- Ensure we fully fetch the desired history if possible (previously, duplicates
  from the offline message queue could hinder MAM sync)
- Drop illegal MAM messages early so they don't pile up in the pending queue
  waiting for their query to end (which it never will if they were not
  requested in the first place). A simplified sketch of this check follows below.

Fixes #1386
2023-03-22 09:59:54 -06:00
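The early-drop logic added further down amounts to tracking which MAM query ids are currently active and rejecting results that reference anything else; a simplified, self-contained sketch (not the real xmpp-vala module):

```vala
// Hypothetical sketch: only accept MAM results that carry a queryid we are
// actually waiting for; everything else is dropped immediately instead of
// sitting in a pending queue forever.
public class MamResultFilter : Object {
    private Gee.Set<string> active_query_ids = new Gee.HashSet<string>();

    public void query_started(string query_id) {
        active_query_ids.add(query_id);
    }

    public void query_finished(string query_id) {
        active_query_ids.remove(query_id);
    }

    // Returns true if a result with this queryid should be dropped early.
    public bool should_drop(string? query_id) {
        if (query_id == null) return true;            // no queryid at all
        return !active_query_ids.contains(query_id);  // not one of our queries
    }
}
```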
Marvin W
acf9c69470
Fix C binding for gst_video_frame_get_data
Fixes #1267
2023-03-22 09:59:54 -06:00
12 changed files with 99 additions and 78 deletions

View File

@@ -7,7 +7,7 @@ jobs:
- uses: actions/checkout@v2
- run: sudo apt-get update
- run: sudo apt-get remove libunwind-14-dev
- run: sudo apt-get install -y build-essential gettext cmake valac libgee-0.8-dev libsqlite3-dev libgtk-4-dev libnotify-dev libgpgme-dev libsoup2.4-dev libgcrypt20-dev libqrencode-dev libgspell-1-dev libnice-dev libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libsrtp2-dev libwebrtc-audio-processing-dev libadwaita-1-dev
- run: sudo apt-get install -y build-essential gettext cmake valac libgee-0.8-dev libsqlite3-dev libgtk-4-dev libnotify-dev libgpgme-dev libsoup2.4-dev libgcrypt20-dev libqrencode-dev libnice-dev libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libsrtp2-dev libwebrtc-audio-processing-dev libadwaita-1-dev
- run: ./configure --with-tests --with-libsignal-in-tree
- run: make
- run: build/xmpp-vala-test

View File

@@ -1,14 +0,0 @@
include(PkgConfigWithFallback)
find_pkg_config_with_fallback(Gspell
PKG_CONFIG_NAME gspell-1
LIB_NAMES gspell-1
INCLUDE_NAMES gspell.h
INCLUDE_DIR_SUFFIXES gspell-1 gspell-1/gspell
DEPENDS GTK3
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gspell
REQUIRED_VARS Gspell_LIBRARY
VERSION_VAR Gspell_VERSION)

View File

@@ -7,7 +7,7 @@ using Dino.Entities;
namespace Dino {
public class Database : Qlite.Database {
private const int VERSION = 25;
private const int VERSION = 26;
public class AccountTable : Table {
public Column<int> id = new Column.Integer("id") { primary_key = true, auto_increment = true };
@@ -93,6 +93,11 @@ public class Database : Qlite.Database {
// deduplication
index("message_account_counterpart_stanzaid_idx", {account_id, counterpart_id, stanza_id});
index("message_account_counterpart_serverid_idx", {account_id, counterpart_id, server_id});
// message by marked
index("message_account_marked_idx", {account_id, marked});
fts({body});
}
}

View File

@@ -163,7 +163,7 @@ public class Dino.HistorySync {
if (current_row[db.mam_catchup.from_end]) return;
debug("[%s] Fetching between ranges %s - %s", mam_server.to_string(), previous_row[db.mam_catchup.to_time].to_string(), current_row[db.mam_catchup.from_time].to_string());
current_row = yield fetch_between_ranges(account, mam_server, previous_row, current_row);
current_row = yield fetch_between_ranges(account, mam_server, previous_row, current_row, cancellable);
if (current_row == null) return;
RowOption previous_row_opt = db.mam_catchup.select()
@@ -214,13 +214,11 @@ public class Dino.HistorySync {
return null;
}
// If we get PageResult.Duplicate, we still want to update the db row to the latest message.
// Catchup finished within first page. Update latest db entry.
if (latest_row_id != -1 &&
page_result.page_result in new PageResult[] { PageResult.TargetReached, PageResult.NoMoreMessages, PageResult.Duplicate }) {
page_result.page_result in new PageResult[] { PageResult.TargetReached, PageResult.NoMoreMessages }) {
if (page_result.stanzas == null || page_result.stanzas.is_empty) return null;
if (page_result.stanzas == null) return null;
string latest_mam_id = page_result.query_result.last;
long latest_mam_time = (long) mam_times[account][latest_mam_id].to_unix();
@@ -272,7 +270,7 @@
** Merges the `earlier_range` db row into the `later_range` db row.
** @return The resulting range comprising `earlier_range`, `later_range`, and everything in between. null if fetching/merge failed.
**/
private async Row? fetch_between_ranges(Account account, Jid mam_server, Row earlier_range, Row later_range) {
private async Row? fetch_between_ranges(Account account, Jid mam_server, Row earlier_range, Row later_range, Cancellable? cancellable = null) {
int later_range_id = (int) later_range[db.mam_catchup.id];
DateTime earliest_time = new DateTime.from_unix_utc(earlier_range[db.mam_catchup.to_time]);
DateTime latest_time = new DateTime.from_unix_utc(later_range[db.mam_catchup.from_time]);
@@ -282,9 +280,9 @@
earliest_time, earlier_range[db.mam_catchup.to_id],
latest_time, later_range[db.mam_catchup.from_id]);
PageRequestResult page_result = yield fetch_query(account, query_params, later_range_id);
PageRequestResult page_result = yield fetch_query(account, query_params, later_range_id, cancellable);
if (page_result.page_result == PageResult.TargetReached) {
if (page_result.page_result == PageResult.TargetReached || page_result.page_result == PageResult.NoMoreMessages) {
debug("[%s | %s] Merging range %i into %i", account.bare_jid.to_string(), mam_server.to_string(), earlier_range[db.mam_catchup.id], later_range_id);
// Merge earlier range into later one.
db.mam_catchup.update()
@@ -330,9 +328,9 @@ public class Dino.HistorySync {
PageRequestResult? page_result = null;
do {
page_result = yield get_mam_page(account, query_params, page_result, cancellable);
debug("Page result %s %b", page_result.page_result.to_string(), page_result.stanzas == null);
debug("[%s | %s] Page result %s (got stanzas: %s)", account.bare_jid.to_string(), query_params.mam_server.to_string(), page_result.page_result.to_string(), (page_result.stanzas != null).to_string());
if (page_result.page_result == PageResult.Error || page_result.page_result == PageResult.Cancelled || page_result.stanzas == null) return page_result;
if (page_result.page_result == PageResult.Error || page_result.page_result == PageResult.Cancelled || page_result.query_result.first == null) return page_result;
string earliest_mam_id = page_result.query_result.first;
long earliest_mam_time = (long)mam_times[account][earliest_mam_id].to_unix();
@@ -357,7 +355,6 @@ public class Dino.HistorySync {
MorePagesAvailable,
TargetReached,
NoMoreMessages,
Duplicate,
Error,
Cancelled
}
@@ -399,23 +396,25 @@ public class Dino.HistorySync {
string query_id = query_params.query_id;
string? after_id = query_params.start_id;
var stanzas_for_query = stanzas.has_key(query_id) && !stanzas[query_id].is_empty ? stanzas[query_id] : null;
if (cancellable != null && cancellable.is_cancelled()) {
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas[query_id]);
stanzas.unset(query_id);
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
}
if (stanzas.has_key(query_id) && !stanzas[query_id].is_empty) {
if (stanzas_for_query != null) {
// Check if we reached our target (from_id)
foreach (Xmpp.MessageStanza message in stanzas[query_id]) {
foreach (Xmpp.MessageStanza message in stanzas_for_query) {
Xmpp.MessageArchiveManagement.MessageFlag? mam_message_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message);
if (mam_message_flag != null && mam_message_flag.mam_id != null) {
if (after_id != null && mam_message_flag.mam_id == after_id) {
// Successfully fetched the whole range
yield send_messages_back_into_pipeline(account, query_id, cancellable);
if (cancellable != null && cancellable.is_cancelled()) {
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas[query_id]);
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
}
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas[query_id]);
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas_for_query);
}
}
}
@@ -423,37 +422,9 @@
// Message got filtered out by xmpp-vala, but successful range fetch nevertheless
yield send_messages_back_into_pipeline(account, query_id);
if (cancellable != null && cancellable.is_cancelled()) {
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas[query_id]);
return new PageRequestResult(PageResult.Cancelled, query_result, stanzas_for_query);
}
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas[query_id]);
}
// Check for duplicates. Go through all messages and build a db query.
foreach (Xmpp.MessageStanza message in stanzas[query_id]) {
Xmpp.MessageArchiveManagement.MessageFlag? mam_message_flag = Xmpp.MessageArchiveManagement.MessageFlag.get_flag(message);
if (mam_message_flag != null && mam_message_flag.mam_id != null) {
if (selection == null) {
selection = @"$(db.message.server_id) = ?";
} else {
selection += @" OR $(db.message.server_id) = ?";
}
selection_args += mam_message_flag.mam_id;
}
}
var duplicates_qry = db.message.select()
.with(db.message.account_id, "=", account.id)
.where(selection, selection_args);
// We don't want messages from different MAM servers to interfere with each other.
if (!query_params.mam_server.equals_bare(account.bare_jid)) {
duplicates_qry.with(db.message.counterpart_id, "=", db.get_jid_id(query_params.mam_server));
} else {
duplicates_qry.with(db.message.type_, "=", Message.Type.CHAT);
}
var duplicates_count = duplicates_qry.count();
if (duplicates_count > 0) {
// We got a duplicate although we thought we have to catch up.
// There was a server bug where prosody would send all messages if it didn't know the after ID that was given
page_result = PageResult.Duplicate;
return new PageRequestResult(PageResult.TargetReached, query_result, stanzas_for_query);
}
}
@@ -461,7 +432,7 @@
if (cancellable != null && cancellable.is_cancelled()) {
page_result = PageResult.Cancelled;
}
return new PageRequestResult(page_result, query_result, stanzas.has_key(query_id) ? stanzas[query_id] : null);
return new PageRequestResult(page_result, query_result, stanzas_for_query);
}
private async void send_messages_back_into_pipeline(Account account, string query_id, Cancellable? cancellable = null) {

View File

@@ -5,7 +5,7 @@ GenericName=Jabber/XMPP Client
Keywords=chat;talk;im;message;xmpp;jabber;
Exec=dino %U
Icon=im.dino.Dino
StartupNotify=false
StartupNotify=true
Terminal=false
Type=Application
Categories=GTK;Network;Chat;InstantMessaging;

View File

@@ -225,7 +225,21 @@ public class ConversationSelectorRow : ListBoxRow {
label.attributes = copy;
}
private bool update_read_pending = false;
private bool update_read_pending_force = false;
protected void update_read(bool force_update = false) {
if (force_update) update_read_pending_force = true;
if (update_read_pending) return;
update_read_pending = true;
Idle.add(() => {
update_read_pending = false;
update_read_pending_force = false;
update_read_idle(update_read_pending_force);
return Source.REMOVE;
}, Priority.LOW);
}
private void update_read_idle(bool force_update = false) {
int current_num_unread = stream_interactor.get_module(ChatInteraction.IDENTITY).get_num_unread(conversation);
if (num_unread == current_num_unread && !force_update) return;
num_unread = current_num_unread;

View File

@@ -10,13 +10,16 @@ public class FileProvider : Dino.FileProvider, Object {
private StreamInteractor stream_interactor;
private Dino.Database dino_db;
private Soup.Session session;
private static Regex http_url_regex = /^https?:\/\/([^\s#]*)$/; // Spaces are invalid in URLs and we can't use fragments for downloads
private static Regex omemo_url_regex = /^aesgcm:\/\/(.*)#(([A-Fa-f0-9]{2}){48}|([A-Fa-f0-9]{2}){44})$/;
public FileProvider(StreamInteractor stream_interactor, Dino.Database dino_db) {
this.stream_interactor = stream_interactor;
this.dino_db = dino_db;
this.session = new Soup.Session();
session.user_agent = @"Dino/$(Dino.get_short_version()) ";
stream_interactor.get_module(MessageProcessor.IDENTITY).received_pipeline.connect(new ReceivedMessageListener(this));
}
@@ -114,8 +117,6 @@ public class FileProvider : Dino.FileProvider, Object {
HttpFileReceiveData? http_receive_data = receive_data as HttpFileReceiveData;
if (http_receive_data == null) return file_meta;
var session = new Soup.Session();
session.user_agent = @"Dino/$(Dino.get_short_version()) ";
var head_message = new Soup.Message("HEAD", http_receive_data.url);
head_message.request_headers.append("Accept-Encoding", "identity");
@@ -150,8 +151,6 @@ public class FileProvider : Dino.FileProvider, Object {
HttpFileReceiveData? http_receive_data = receive_data as HttpFileReceiveData;
if (http_receive_data == null) assert(false);
var session = new Soup.Session();
session.user_agent = @"Dino/$(Dino.get_short_version()) ";
var get_message = new Soup.Message("GET", http_receive_data.url);
try {

View File

@@ -7,12 +7,15 @@ namespace Dino.Plugins.HttpFiles {
public class HttpFileSender : FileSender, Object {
private StreamInteractor stream_interactor;
private Database db;
private Soup.Session session;
private HashMap<Account, long> max_file_sizes = new HashMap<Account, long>(Account.hash_func, Account.equals_func);
public HttpFileSender(StreamInteractor stream_interactor, Database db) {
this.stream_interactor = stream_interactor;
this.db = db;
this.session = new Soup.Session();
session.user_agent = @"Dino/$(Dino.get_short_version()) ";
stream_interactor.stream_negotiated.connect(on_stream_negotiated);
stream_interactor.get_module(MessageProcessor.IDENTITY).build_message_stanza.connect(check_add_oob);
}
@@ -90,8 +93,6 @@ public class HttpFileSender : FileSender, Object {
Xmpp.XmppStream? stream = stream_interactor.get_stream(file_transfer.account);
if (stream == null) return;
var session = new Soup.Session();
session.user_agent = @"Dino/$(Dino.get_short_version()) ";
var put_message = new Soup.Message("PUT", file_send_data.url_up);
#if SOUP_3_0
put_message.set_request_body(file_meta.mime_type, file_transfer.input_stream, (ssize_t) file_meta.size);

View File

@@ -1,4 +1,5 @@
private static extern unowned Gst.Video.Info gst_video_frame_get_video_info(Gst.Video.Frame frame);
[CCode (array_length_type = "size_t", type = "void*")]
private static extern unowned uint8[] gst_video_frame_get_data(Gst.Video.Frame frame);
public class Dino.Plugins.Rtp.Paintable : Gdk.Paintable, Object {

View File

@@ -58,6 +58,10 @@ public class ReceivedPipelineListener : StanzaListener<MessageStanza> {
warning("Received alleged carbon message from %s, ignoring", message.from.to_string());
return true;
}
if (message_node == null) {
warning("Received a carbon message with no message subnode in jabber:client namespace from %s, ignoring", message.from.to_string());
return true;
}
if (received_node != null) {
message.add_flag(new MessageFlag(MessageFlag.TYPE_RECEIVED));
} else if (sent_node != null) {

View File

@@ -11,8 +11,8 @@ public class QueryResult {
public bool error { get; set; default=false; }
public bool malformed { get; set; default=false; }
public bool complete { get; set; default=false; }
public string first { get; set; }
public string last { get; set; }
public string? first { get; set; }
public string? last { get; set; }
}
public class Module : XmppStreamModule {
@@ -65,16 +65,17 @@ public class Module : XmppStreamModule {
}
StanzaNode query_node = new StanzaNode.build("query", NS_VER(stream)).add_self_xmlns().put_node(data_form.get_submit_node());
if (queryid != null) {
query_node.put_attribute("queryid", queryid);
}
return query_node;
}
internal async QueryResult query_archive(XmppStream stream, string ns, Jid? mam_server, StanzaNode query_node, Cancellable? cancellable = null) {
var res = new QueryResult();
if (stream.get_flag(Flag.IDENTITY) == null) { res.error = true; return res; }
var res = new QueryResult();
Flag? flag = stream.get_flag(Flag.IDENTITY);
string? query_id = query_node.get_attribute("queryid");
if (flag == null || query_id == null) { res.error = true; return res; }
flag.active_query_ids.add(query_id);
// Build and send query
Iq.Stanza iq = new Iq.Stanza.set(query_node) { to=mam_server };
@@ -93,6 +94,11 @@ public class Module : XmppStreamModule {
if ((res.first == null) != (res.last == null)) { res.malformed = true; return res; }
res.complete = fin_node.get_attribute_bool("complete", false, ns);
Idle.add(() => {
flag.active_query_ids.remove(query_id);
return Source.REMOVE;
}, Priority.LOW);
return res;
}
@@ -104,7 +110,8 @@ public class ReceivedPipelineListener : StanzaListener<MessageStanza> {
public override string[] after_actions { get { return after_actions_const; } }
public override async bool run(XmppStream stream, MessageStanza message) {
if (stream.get_flag(Flag.IDENTITY) == null) return false;
Flag? flag = stream.get_flag(Flag.IDENTITY);
if (flag == null) return false;
StanzaNode? message_node = message.stanza.get_deep_subnode(NS_VER(stream) + ":result", StanzaForwarding.NS_URI + ":forwarded", Xmpp.NS_URI + ":message");
if (message_node != null) {
@@ -112,6 +119,28 @@ public class ReceivedPipelineListener : StanzaListener<MessageStanza> {
DateTime? datetime = DelayedDelivery.get_time_for_node(forward_node);
string? mam_id = message.stanza.get_deep_attribute(NS_VER(stream) + ":result", NS_VER(stream) + ":id");
string? query_id = message.stanza.get_deep_attribute(NS_VER(stream) + ":result", NS_VER(stream) + ":queryid");
if (query_id == null) {
warning("Received MAM message without queryid from %s, ignoring", message.from.to_string());
return true;
}
if (!flag.active_query_ids.contains(query_id)) {
warning("Received MAM message from %s with unknown query id %s, ignoring", message.from.to_string(), query_id ?? "<none>");
return true;
}
Jid? inner_from = null;
try {
inner_from = new Jid(message_node.get_attribute("from"));
} catch (InvalidJidError e) {
warning("Received MAM message with invalid from attribute in forwarded message from %s, ignoring", message.from.to_string());
return true;
}
if (!message.from.equals(stream.get_flag(Bind.Flag.IDENTITY).my_jid.bare_jid) && !message.from.equals_bare(inner_from)) {
warning("Received MAM message from %s illegally impersonating %s, ignoring", message.from.to_string(), inner_from.to_string());
return true;
}
message.add_flag(new MessageFlag(message.from, datetime, mam_id, query_id));
message.stanza = message_node;
@@ -124,6 +153,7 @@ public class ReceivedPipelineListener : StanzaListener<MessageStanza> {
public class Flag : XmppStreamFlag {
public static FlagIdentity<Flag> IDENTITY = new FlagIdentity<Flag>(NS_URI, "message_archive_management");
public bool cought_up { get; set; default=false; }
public Gee.Set<string> active_query_ids { get; set; default = new HashSet<string>(); }
public string ns_ver;
public Flag(string ns_ver) {

View File

@@ -68,6 +68,11 @@ public class Module : BookmarksProvider, XmppStreamModule {
}
private void on_pupsub_item(XmppStream stream, Jid jid, string id, StanzaNode? node) {
if (!jid.equals(stream.get_flag(Bind.Flag.IDENTITY).my_jid.bare_jid)) {
warning("Received alleged bookmarks:1 item from %s, ignoring", jid.to_string());
return;
}
Conference conference = parse_item_node(node, id);
Flag? flag = stream.get_flag(Flag.IDENTITY);
if (flag != null) {
@@ -77,6 +82,11 @@ public class Module : BookmarksProvider, XmppStreamModule {
}
private void on_pupsub_retract(XmppStream stream, Jid jid, string id) {
if (!jid.equals(stream.get_flag(Bind.Flag.IDENTITY).my_jid.bare_jid)) {
warning("Received alleged bookmarks:1 retract from %s, ignoring", jid.to_string());
return;
}
try {
Jid jid_parsed = new Jid(id);
Flag? flag = stream.get_flag(Flag.IDENTITY);