Remove dependency on matrix-react-sdk
@@ -10,7 +10,7 @@ import { Item } from "@react-stately/collections";
import { useMediaHandler } from "./useMediaHandler";
import { FieldRow, InputField } from "../input/Input";
import { Button } from "../button";
-import { useDownloadDebugLog } from "./rageshake";
+import { useDownloadDebugLog } from "./submit-rageshake";
import { Body } from "../typography/Typography";

export function SettingsModal({ setShowInspector, showInspector, ...rest }) {
@@ -1,300 +1,535 @@
/*
Copyright 2017 OpenMarket Ltd
Copyright 2018 New Vector Ltd
Copyright 2019 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// This module contains all the code needed to log the console, persist it to
// disk and submit bug reports. Rationale is as follows:
// - Monkey-patching the console is preferable to having a log library because
//   we can catch logs by other libraries more easily, without having to all
//   depend on the same log framework / pass the logger around.
// - We use IndexedDB to persists logs because it has generous disk space
//   limits compared to local storage. IndexedDB does not work in incognito
//   mode, in which case this module will not be able to write logs to disk.
//   However, the logs will still be stored in-memory, so can still be
//   submitted in a bug report should the user wish to: we can also store more
//   logs in-memory than in local storage, which does work in incognito mode.
//   We also need to handle the case where there are 2+ tabs. Each JS runtime
//   generates a random string which serves as the "ID" for that tab/session.
//   These IDs are stored along with the log lines.
// - Bug reports are sent as a POST over HTTPS: it purposefully does not use
//   Matrix as bug reports may be made when Matrix is not responsive (which may
//   be the cause of the bug). We send the most recent N MB of UTF-8 log data,
//   starting with the most recent, which we know because the "ID"s are
//   actually timestamps. We then purge the remaining logs. We also do this
//   purge on startup to prevent logs from accumulating.

import { logger } from "matrix-js-sdk/src/logger";

// the frequency with which we flush to indexeddb
const FLUSH_RATE_MS = 30 * 1000;

// the length of log data we keep in indexeddb (and include in the reports)
const MAX_LOG_SIZE = 1024 * 1024 * 5; // 5 MB

// A class which monkey-patches the global console and stores log lines.
export class ConsoleLogger {
  logs = "";

  monkeyPatch(consoleObj) {
    // Monkey-patch console logging
    const consoleFunctionsToLevels = {
      log: "I",
      info: "I",
      warn: "W",
      error: "E",
    };
    Object.keys(consoleFunctionsToLevels).forEach((fnName) => {
      const level = consoleFunctionsToLevels[fnName];
      const originalFn = consoleObj[fnName].bind(consoleObj);
      consoleObj[fnName] = (...args) => {
        this.log(level, ...args);
        originalFn(...args);
      };
    });
  }

  log(level, ...args) {
    // We don't know what locale the user may be running so use ISO strings
    const ts = new Date().toISOString();

    // Convert objects and errors to helpful things
    args = args.map((arg) => {
      if (arg instanceof DOMException) {
        return arg.message + ` (${arg.name} | ${arg.code})`;
      } else if (arg instanceof Error) {
        return arg.message + (arg.stack ? `\n${arg.stack}` : "");
      } else if (typeof arg === "object") {
        try {
          return JSON.stringify(arg);
        } catch (e) {
          // In development, it can be useful to log complex cyclic
          // objects to the console for inspection. This is fine for
          // the console, but default `stringify` can't handle that.
          // We workaround this by using a special replacer function
          // to only log values of the root object and avoid cycles.
          return JSON.stringify(arg, (key, value) => {
            if (key && typeof value === "object") {
              return "<object>";
            }
            return value;
          });
        }
      } else {
        return arg;
      }
    });

    // Some browsers support string formatting which we're not doing here
    // so the lines are a little more ugly but easy to implement / quick to
    // run.
    // Example line:
    // 2017-01-18T11:23:53.214Z W Failed to set badge count
    let line = `${ts} ${level} ${args.join(" ")}\n`;
    // Do some cleanup
    line = line.replace(/token=[a-zA-Z0-9-]+/gm, "token=xxxxx");
    // Using + really is the quickest way in JS
    // http://jsperf.com/concat-vs-plus-vs-join
    this.logs += line;
  }

  /**
   * Retrieve log lines to flush to disk.
   * @param {boolean} keepLogs True to not delete logs after flushing.
   * @return {string} \n delimited log lines to flush.
   */
  flush(keepLogs) {
    // The ConsoleLogger doesn't care how these end up on disk, it just
    // flushes them to the caller.
    if (keepLogs) {
      return this.logs;
    }
    const logsToFlush = this.logs;
    this.logs = "";
    return logsToFlush;
  }
}
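
// Illustrative sketch (mirrors what init() further down does, not part of the
// original commit): patch the global console, then read the captured,
// timestamp-prefixed lines back out of the in-memory buffer.
function demoConsoleLogger() {
  const consoleLogger = new ConsoleLogger();
  consoleLogger.monkeyPatch(window.console);
  console.warn("failed to set badge count", { code: 42 });
  // flush(true) keeps the buffer intact; flush() would hand the lines to the
  // caller and clear the buffer.
  return consoleLogger.flush(true); // e.g. "2017-01-18T11:23:53.214Z W ..."
}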

// A class which stores log lines in an IndexedDB instance.
export class IndexedDBLogStore {
  index = 0;
  db = null;
  flushPromise = null;
  flushAgainPromise = null;

  constructor(indexedDB, logger) {
    this.indexedDB = indexedDB;
    this.logger = logger;
    this.id = "instance-" + Math.random() + Date.now();
  }

  /**
   * @return {Promise} Resolves when the store is ready.
   */
  connect() {
    const req = this.indexedDB.open("logs");
    return new Promise((resolve, reject) => {
      req.onsuccess = (event) => {
        // @ts-ignore
        this.db = event.target.result;
        // Periodically flush logs to local storage / indexeddb
        setInterval(this.flush.bind(this), FLUSH_RATE_MS);
        resolve();
      };

      req.onerror = (event) => {
        const err =
          // @ts-ignore
          "Failed to open log database: " + event.target.error.name;
        logger.error(err);
        reject(new Error(err));
      };

      // First time: Setup the object store
      req.onupgradeneeded = (event) => {
        // @ts-ignore
        const db = event.target.result;
        const logObjStore = db.createObjectStore("logs", {
          keyPath: ["id", "index"],
        });
        // Keys in the database look like: [ "instance-148938490", 0 ]
        // Later on we need to query everything based on an instance id.
        // In order to do this, we need to set up indexes "id".
        logObjStore.createIndex("id", "id", { unique: false });

        logObjStore.add(
          this.generateLogEntry(new Date() + " ::: Log database was created.")
        );

        const lastModifiedStore = db.createObjectStore("logslastmod", {
          keyPath: "id",
        });
        lastModifiedStore.add(this.generateLastModifiedTime());
      };
    });
  }

  /**
   * Flush logs to disk.
   *
   * There are guards to protect against race conditions in order to ensure
   * that all previous flushes have completed before the most recent flush.
   * Consider without guards:
   * - A calls flush() periodically.
   * - B calls flush() and wants to send logs immediately afterwards.
   * - If B doesn't wait for A's flush to complete, B will be missing the
   *   contents of A's flush.
   * To protect against this, we set 'flushPromise' when a flush is ongoing.
   * Subsequent calls to flush() during this period will chain another flush,
   * then keep returning that same chained flush.
   *
   * This guarantees that we will always eventually do a flush when flush() is
   * called.
   *
   * @return {Promise} Resolved when the logs have been flushed.
   */
  flush() {
    // check if a flush() operation is ongoing
    if (this.flushPromise) {
      if (this.flushAgainPromise) {
        // this is the 3rd+ time we've called flush() : return the same promise.
        return this.flushAgainPromise;
      }
      // queue up a flush to occur immediately after the pending one completes.
      this.flushAgainPromise = this.flushPromise
        .then(() => {
          return this.flush();
        })
        .then(() => {
          this.flushAgainPromise = null;
        });
      return this.flushAgainPromise;
    }
    // there is no flush promise or there was but it has finished, so do
    // a brand new one, destroying the chain which may have been built up.
    this.flushPromise = new Promise((resolve, reject) => {
      if (!this.db) {
        // not connected yet or user rejected access for us to r/w to the db.
        reject(new Error("No connected database"));
        return;
      }
      const lines = this.logger.flush();
      if (lines.length === 0) {
        resolve();
        return;
      }
      const txn = this.db.transaction(["logs", "logslastmod"], "readwrite");
      const objStore = txn.objectStore("logs");
      txn.oncomplete = (event) => {
        resolve();
      };
      txn.onerror = (event) => {
        logger.error("Failed to flush logs : ", event);
        reject(new Error("Failed to write logs: " + event.target.errorCode));
      };
      objStore.add(this.generateLogEntry(lines));
      const lastModStore = txn.objectStore("logslastmod");
      lastModStore.put(this.generateLastModifiedTime());
    }).then(() => {
      this.flushPromise = null;
    });
    return this.flushPromise;
  }

  /**
   * Consume the most recent logs and return them. Older logs which are not
   * returned are deleted at the same time, so this can be called at startup
   * to do house-keeping to keep the logs from growing too large.
   *
   * @return {Promise<Object[]>} Resolves to an array of objects. The array is
   * sorted in time (oldest first) based on when the log file was created (the
   * log ID). The objects have said log ID in an "id" field and "lines" which
   * is a big string with all the new-line delimited logs.
   */
  async consume() {
    const db = this.db;

    // Returns: a string representing the concatenated logs for this ID.
    // Stops adding log fragments when the size exceeds maxSize
    function fetchLogs(id, maxSize) {
      const objectStore = db
        .transaction("logs", "readonly")
        .objectStore("logs");

      return new Promise((resolve, reject) => {
        const query = objectStore
          .index("id")
          .openCursor(IDBKeyRange.only(id), "prev");
        let lines = "";
        query.onerror = (event) => {
          reject(new Error("Query failed: " + event.target.errorCode));
        };
        query.onsuccess = (event) => {
          const cursor = event.target.result;
          if (!cursor) {
            resolve(lines);
            return; // end of results
          }
          lines = cursor.value.lines + lines;
          if (lines.length >= maxSize) {
            resolve(lines);
          } else {
            cursor.continue();
          }
        };
      });
    }

    // Returns: A sorted array of log IDs. (newest first)
    function fetchLogIds() {
      // To gather all the log IDs, query for all records in logslastmod.
      const o = db
        .transaction("logslastmod", "readonly")
        .objectStore("logslastmod");
      return selectQuery(o, undefined, (cursor) => {
        return {
          id: cursor.value.id,
          ts: cursor.value.ts,
        };
      }).then((res) => {
        // Sort IDs by timestamp (newest first)
        return res
          .sort((a, b) => {
            return b.ts - a.ts;
          })
          .map((a) => a.id);
      });
    }

    function deleteLogs(id) {
      return new Promise((resolve, reject) => {
        const txn = db.transaction(["logs", "logslastmod"], "readwrite");
        const o = txn.objectStore("logs");
        // only load the key path, not the data which may be huge
        const query = o.index("id").openKeyCursor(IDBKeyRange.only(id));
        query.onsuccess = (event) => {
          const cursor = event.target.result;
          if (!cursor) {
            return;
          }
          o.delete(cursor.primaryKey);
          cursor.continue();
        };
        txn.oncomplete = () => {
          resolve();
        };
        txn.onerror = (event) => {
          reject(
            new Error(
              "Failed to delete logs for " +
                `'${id}' : ${event.target.errorCode}`
            )
          );
        };
        // delete last modified entries
        const lastModStore = txn.objectStore("logslastmod");
        lastModStore.delete(id);
      });
    }

    const allLogIds = await fetchLogIds();
    let removeLogIds = [];
    const logs = [];
    let size = 0;
    for (let i = 0; i < allLogIds.length; i++) {
      const lines = await fetchLogs(allLogIds[i], MAX_LOG_SIZE - size);

      // always add the log file: fetchLogs will truncate once the maxSize we give it is
      // exceeded, so we'll go over the max but only by one fragment's worth.
      logs.push({
        lines: lines,
        id: allLogIds[i],
      });
      size += lines.length;

      // If fetchLogs truncated we'll now be at or over the size limit,
      // in which case we should stop and remove the rest of the log files.
      if (size >= MAX_LOG_SIZE) {
        // the remaining log IDs should be removed. If we go out of
        // bounds this is just []
        removeLogIds = allLogIds.slice(i + 1);
        break;
      }
    }
    if (removeLogIds.length > 0) {
      logger.log("Removing logs: ", removeLogIds);
      // Don't await this because it's non-fatal if we can't clean up
      // logs.
      Promise.all(removeLogIds.map((id) => deleteLogs(id))).then(
        () => {
          logger.log(`Removed ${removeLogIds.length} old logs.`);
        },
        (err) => {
          logger.error(err);
        }
      );
    }
    return logs;
  }

  generateLogEntry(lines) {
    return {
      id: this.id,
      lines: lines,
      index: this.index++,
    };
  }

  generateLastModifiedTime() {
    return {
      id: this.id,
      ts: Date.now(),
    };
  }
}
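
// Illustrative sketch (assumes an IndexedDB-capable browser; not part of the
// original commit): overlapping flush() calls are coalesced by the
// flushPromise/flushAgainPromise guards above rather than racing each other.
async function demoFlushCoalescing() {
  const store = new IndexedDBLogStore(window.indexedDB, new ConsoleLogger());
  await store.connect();
  const first = store.flush(); // begins flushing any pending lines right away
  const second = store.flush(); // chained to run only after `first` settles
  const third = store.flush(); // reuses the same chained promise as `second`
  console.log(second === third); // true while `first` is still pending
  await Promise.all([first, second, third]);
}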

/**
 * Helper method to collect results from a Cursor and promiseify it.
 * @param {ObjectStore|Index} store The store to perform openCursor on.
 * @param {IDBKeyRange=} keyRange Optional key range to apply on the cursor.
 * @param {Function} resultMapper A function which is repeatedly called with a
 * Cursor.
 * Return the data you want to keep.
 * @return {Promise<T[]>} Resolves to an array of whatever you returned from
 * resultMapper.
 */
function selectQuery(store, keyRange, resultMapper) {
  const query = store.openCursor(keyRange);
  return new Promise((resolve, reject) => {
    const results = [];
    query.onerror = (event) => {
      // @ts-ignore
      reject(new Error("Query failed: " + event.target.errorCode));
    };
    // collect results
    query.onsuccess = (event) => {
      // @ts-ignore
      const cursor = event.target.result;
      if (!cursor) {
        resolve(results);
        return; // end of results
      }
      results.push(resultMapper(cursor));
      cursor.continue();
    };
  });
}
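
// Illustrative sketch (assumes `db` is an open IDBDatabase, like this.db in
// IndexedDBLogStore; not part of the original commit): selectQuery promisifies
// a cursor walk, which is how consume() gathers every stored log ID from the
// "logslastmod" store.
function demoSelectQuery(db) {
  const store = db
    .transaction("logslastmod", "readonly")
    .objectStore("logslastmod");
  return selectQuery(store, undefined, (cursor) => cursor.value.id);
}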

/**
 * Configure rage shaking support for sending bug reports.
 * Modifies globals.
 * @param {boolean} setUpPersistence When true (default), the persistence will
 * be set up immediately for the logs.
 * @return {Promise} Resolves when set up.
 */
export function init(setUpPersistence = true) {
  if (global.mx_rage_initPromise) {
    return global.mx_rage_initPromise;
  }
  global.mx_rage_logger = new ConsoleLogger();
  global.mx_rage_logger.monkeyPatch(window.console);

  if (setUpPersistence) {
    return tryInitStorage();
  }

  global.mx_rage_initPromise = Promise.resolve();
  return global.mx_rage_initPromise;
}

/**
 * Try to start up the rageshake storage for logs. If not possible (client unsupported)
 * then this no-ops.
 * @return {Promise} Resolves when complete.
 */
export function tryInitStorage() {
  if (global.mx_rage_initStoragePromise) {
    return global.mx_rage_initStoragePromise;
  }

  logger.log("Configuring rageshake persistence...");

  // just *accessing* indexedDB throws an exception in firefox with
  // indexeddb disabled.
  let indexedDB;
  try {
    indexedDB = window.indexedDB;
  } catch (e) {}

  if (indexedDB) {
    global.mx_rage_store = new IndexedDBLogStore(
      indexedDB,
      global.mx_rage_logger
    );
    global.mx_rage_initStoragePromise = global.mx_rage_store.connect();
    return global.mx_rage_initStoragePromise;
  }
  global.mx_rage_initStoragePromise = Promise.resolve();
  return global.mx_rage_initStoragePromise;
}

export function flush() {
  if (!global.mx_rage_store) {
    return;
  }
  global.mx_rage_store.flush();
}

/**
 * Clean up old logs.
 * @return {Promise} Resolves if cleaned logs.
 */
export async function cleanup() {
  if (!global.mx_rage_store) {
    return;
  }
  await global.mx_rage_store.consume();
}

/**
 * Get a recent snapshot of the logs, ready for attaching to a bug report
 *
 * @return {Array<{lines: string, id, string}>} list of log data
 */
export async function getLogsForReport() {
  if (!global.mx_rage_logger) {
    throw new Error("No console logger, did you forget to call init()?");
  }
  // If in incognito mode, store is null, but we still want bug report
  // sending to work going off the in-memory console logs.
  if (global.mx_rage_store) {
    // flush most recent logs
    await global.mx_rage_store.flush();
    return await global.mx_rage_store.consume();
  } else {
    return [
      {
        lines: global.mx_rage_logger.flush(true),
        id: "-",
      },
    ];
  }
}
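
// Illustrative sketch of the intended lifecycle of this module; the call site
// below is hypothetical and not part of this commit (submit-rageshake.js in
// the next file only imports getLogsForReport).
async function demoRageshakeLifecycle() {
  await init(); // monkey-patch the console and open the IndexedDB store
  await cleanup(); // purge logs left over from previous sessions
  // ...later, when assembling a bug report:
  return await getLogsForReport();
}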

300 src/settings/submit-rageshake.js Normal file
@@ -0,0 +1,300 @@
import { useCallback, useContext, useEffect, useState } from "react";
import { getLogsForReport } from "./rageshake";
import pako from "pako";
import { useClient } from "../ClientContext";
import { InspectorContext } from "../room/GroupCallInspector";
import { useModalTriggerState } from "../Modal";

export function useSubmitRageshake() {
  const { client } = useClient();
  const [{ json }] = useContext(InspectorContext);

  const [{ sending, sent, error }, setState] = useState({
    sending: false,
    sent: false,
    error: null,
  });

  const submitRageshake = useCallback(
    async (opts) => {
      if (sending) {
        return;
      }

      try {
        setState({ sending: true, sent: false, error: null });

        let userAgent = "UNKNOWN";
        if (window.navigator && window.navigator.userAgent) {
          userAgent = window.navigator.userAgent;
        }

        let touchInput = "UNKNOWN";
        try {
          // MDN claims broad support across browsers
          touchInput = String(window.matchMedia("(pointer: coarse)").matches);
        } catch (e) {}

        const body = new FormData();
        body.append(
          "text",
          opts.description || "User did not supply any additional text."
        );
        body.append("app", "matrix-video-chat");
        body.append("version", import.meta.env.VITE_APP_VERSION || "dev");
        body.append("user_agent", userAgent);
        body.append("installed_pwa", false);
        body.append("touch_input", touchInput);

        if (client) {
          const userId = client.getUserId();
          const user = client.getUser(userId);
          body.append("display_name", user?.displayName);
          body.append("user_id", client.credentials.userId);
          body.append("device_id", client.deviceId);

          if (opts.roomId) {
            body.append("room_id", opts.roomId);
          }

          if (client.isCryptoEnabled()) {
            const keys = [`ed25519:${client.getDeviceEd25519Key()}`];
            if (client.getDeviceCurve25519Key) {
              keys.push(`curve25519:${client.getDeviceCurve25519Key()}`);
            }
            body.append("device_keys", keys.join(", "));
            body.append("cross_signing_key", client.getCrossSigningId());

            // add cross-signing status information
            const crossSigning = client.crypto.crossSigningInfo;
            const secretStorage = client.crypto.secretStorage;

            body.append(
              "cross_signing_ready",
              String(await client.isCrossSigningReady())
            );
            body.append(
              "cross_signing_supported_by_hs",
              String(
                await client.doesServerSupportUnstableFeature(
                  "org.matrix.e2e_cross_signing"
                )
              )
            );
            body.append("cross_signing_key", crossSigning.getId());
            body.append(
              "cross_signing_privkey_in_secret_storage",
              String(
                !!(await crossSigning.isStoredInSecretStorage(secretStorage))
              )
            );

            const pkCache = client.getCrossSigningCacheCallbacks();
            body.append(
              "cross_signing_master_privkey_cached",
              String(
                !!(pkCache && (await pkCache.getCrossSigningKeyCache("master")))
              )
            );
            body.append(
              "cross_signing_self_signing_privkey_cached",
              String(
                !!(
                  pkCache &&
                  (await pkCache.getCrossSigningKeyCache("self_signing"))
                )
              )
            );
            body.append(
              "cross_signing_user_signing_privkey_cached",
              String(
                !!(
                  pkCache &&
                  (await pkCache.getCrossSigningKeyCache("user_signing"))
                )
              )
            );

            body.append(
              "secret_storage_ready",
              String(await client.isSecretStorageReady())
            );
            body.append(
              "secret_storage_key_in_account",
              String(!!(await secretStorage.hasKey()))
            );

            body.append(
              "session_backup_key_in_secret_storage",
              String(!!(await client.isKeyBackupKeyStored()))
            );
            const sessionBackupKeyFromCache =
              await client.crypto.getSessionBackupPrivateKey();
            body.append(
              "session_backup_key_cached",
              String(!!sessionBackupKeyFromCache)
            );
            body.append(
              "session_backup_key_well_formed",
              String(sessionBackupKeyFromCache instanceof Uint8Array)
            );
          }
        }

        if (opts.label) {
          body.append("label", opts.label);
        }

        // add storage persistence/quota information
        if (navigator.storage && navigator.storage.persisted) {
          try {
            body.append(
              "storageManager_persisted",
              String(await navigator.storage.persisted())
            );
          } catch (e) {}
        } else if (document.hasStorageAccess) {
          // Safari
          try {
            body.append(
              "storageManager_persisted",
              String(await document.hasStorageAccess())
            );
          } catch (e) {}
        }

        if (navigator.storage && navigator.storage.estimate) {
          try {
            const estimate = await navigator.storage.estimate();
            body.append("storageManager_quota", String(estimate.quota));
            body.append("storageManager_usage", String(estimate.usage));
            if (estimate.usageDetails) {
              Object.keys(estimate.usageDetails).forEach((k) => {
                body.append(
                  `storageManager_usage_${k}`,
                  String(estimate.usageDetails[k])
                );
              });
            }
          } catch (e) {}
        }

        if (opts.sendLogs) {
          const logs = await getLogsForReport();

          for (const entry of logs) {
            // encode as UTF-8
            let buf = new TextEncoder().encode(entry.lines);

            // compress
            buf = pako.gzip(buf);

            body.append("compressed-log", new Blob([buf]), entry.id);
          }

          if (json) {
            body.append(
              "file",
              new Blob([JSON.stringify(json)], { type: "text/plain" }),
              "groupcall.txt"
            );
          }
        }

        if (opts.rageshakeRequestId) {
          body.append(
            "group_call_rageshake_request_id",
            opts.rageshakeRequestId
          );
        }

        await fetch(
          import.meta.env.VITE_RAGESHAKE_SUBMIT_URL ||
            "https://element.io/bugreports/submit",
          {
            method: "POST",
            body,
          }
        );

        setState({ sending: false, sent: true, error: null });
      } catch (error) {
        setState({ sending: false, sent: false, error });
        console.error(error);
      }
    },
    [client]
  );

  return {
    submitRageshake,
    sending,
    sent,
    error,
  };
}
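
// Illustrative sketch (hypothetical helper, not part of this commit): the
// opts object accepted by submitRageshake carries the fields read above
// (description, sendLogs, roomId, label, rageshakeRequestId).
function useReportBug(roomId) {
  const { submitRageshake, sending, sent, error } = useSubmitRageshake();
  const report = useCallback(
    (description) => submitRageshake({ description, sendLogs: true, roomId }),
    [submitRageshake, roomId]
  );
  return { report, sending, sent, error };
}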

export function useDownloadDebugLog() {
  const [{ json }] = useContext(InspectorContext);

  const downloadDebugLog = useCallback(() => {
    const blob = new Blob([JSON.stringify(json)], { type: "application/json" });
    const url = URL.createObjectURL(blob);
    const el = document.createElement("a");
    el.href = url;
    el.download = "groupcall.json";
    el.style.display = "none";
    document.body.appendChild(el);
    el.click();
    setTimeout(() => {
      URL.revokeObjectURL(url);
      el.parentNode.removeChild(el);
    }, 0);
  }, [json]);

  return downloadDebugLog;
}

export function useRageshakeRequest() {
  const { client } = useClient();

  const sendRageshakeRequest = useCallback(
    (roomId, rageshakeRequestId) => {
      client.sendEvent(roomId, "org.matrix.rageshake_request", {
        request_id: rageshakeRequestId,
      });
    },
    [client]
  );

  return sendRageshakeRequest;
}
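
// Illustrative sketch (hypothetical call site, not part of this commit): ask
// another participant in the room to submit their logs, tagging the request
// with an ID so their report can be matched to ours via
// group_call_rageshake_request_id above.
function demoRequestLogs(sendRageshakeRequest, roomId) {
  const rageshakeRequestId = Date.now().toString();
  sendRageshakeRequest(roomId, rageshakeRequestId);
  return rageshakeRequestId;
}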

export function useRageshakeRequestModal(roomId) {
  const { modalState, modalProps } = useModalTriggerState();
  const { client } = useClient();
  const [rageshakeRequestId, setRageshakeRequestId] = useState();

  useEffect(() => {
    const onEvent = (event) => {
      const type = event.getType();

      if (
        type === "org.matrix.rageshake_request" &&
        roomId === event.getRoomId() &&
        client.getUserId() !== event.getSender()
      ) {
        setRageshakeRequestId(event.getContent().request_id);
        modalState.open();
      }
    };

    client.on("event", onEvent);

    return () => {
      client.removeListener("event", onEvent);
    };
  }, [modalState.open, roomId]);

  return { modalState, modalProps: { ...modalProps, rageshakeRequestId } };
}
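
// Illustrative sketch (hypothetical wiring, not part of this commit): the
// modal opens on the receiving side when an "org.matrix.rageshake_request"
// event arrives for this room; modalProps.rageshakeRequestId can then be
// passed through to submitRageshake.
function useRageshakeExchange(roomId) {
  const sendRageshakeRequest = useRageshakeRequest();
  const { modalState, modalProps } = useRageshakeRequestModal(roomId);
  const requestLogs = () =>
    sendRageshakeRequest(roomId, Date.now().toString());
  return { requestLogs, modalState, modalProps };
}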