fix: resolve TypeScript errors in frontend build

This commit is contained in:
Hiro
2026-03-30 23:16:07 +00:00
parent b733306773
commit 24925e1acb
2941 changed files with 418042 additions and 49 deletions

View File

@@ -0,0 +1,48 @@
import ist from "ist"
import {schema, doc, p} from "prosemirror-test-builder"
import {Transform} from "prosemirror-transform"
import {Node} from "prosemirror-model"
import {ChangeSet} from "prosemirror-changeset"
/**
 * Build a transform over `doc` via the supplied `change` callback and derive
 * two change sets from it: `set0` (empty, created from the original doc) and
 * `set` (with the transform's steps added, each tagged with the data "a").
 * Returns the original doc, the transform, the per-step data array, and both sets.
 */
function mk(doc: Node, change: (tr: Transform) => Transform): {doc0: Node, tr: Transform, data: any[], set0: ChangeSet, set: ChangeSet} {
  const tr = change(new Transform(doc))
  // One data tag per step, all identical.
  const data: any[] = []
  for (let i = 0; i < tr.steps.length; i++) data.push("a")
  const set0 = ChangeSet.create(doc)
  const set = set0.addSteps(tr.doc, tr.mapping.maps, data)
  return {doc0: doc, tr, data, set0, set}
}
/** Assert structural equality of two values by comparing their JSON serializations. */
function same(a: any, b: any) {
  const actual = JSON.stringify(a)
  const expected = JSON.stringify(b)
  ist(actual, expected)
}
// Tests for ChangeSet.changedRange: given another change set (and optionally
// the step maps relating them), it reports the {from, to} region in which the
// two sets differ, or null when they are identical.
describe("ChangeSet.changedRange", () => {
it("returns null for identical sets", () => {
let {set, doc0, tr, data} = mk(doc(p("foo")), tr => tr
.replaceWith(2, 3, schema.text("aaaa"))
.replaceWith(1, 1, schema.text("xx"))
.delete(5, 7))
// A set compared with itself, and with an independently rebuilt copy,
// must both yield null.
ist(set.changedRange(set), null)
ist(set.changedRange(ChangeSet.create(doc0).addSteps(tr.doc, tr.mapping.maps, data)), null)
})
it("returns only the changed range in simple cases", () => {
let {set0, set, tr} = mk(doc(p("abcd")), tr => tr.replaceWith(2, 4, schema.text("u")))
same(set0.changedRange(set, tr.mapping.maps), {from: 2, to: 3})
})
it("expands to cover updated spans", () => {
let {doc0, set0, set, tr} = mk(doc(p("abcd")), tr => tr
.replaceWith(2, 2, schema.text("c"))
.delete(3, 5))
// set1 contains only the first of the two steps, so comparing against it
// in either direction isolates the range touched by each step.
let set1 = ChangeSet.create(doc0).addSteps(tr.docs[1], [tr.mapping.maps[0]], ["a"])
same(set0.changedRange(set1, [tr.mapping.maps[0]]), {from: 2, to: 3})
same(set1.changedRange(set, [tr.mapping.maps[1]]), {from: 2, to: 3})
})
it("detects changes in deletions", () => {
let {set} = mk(doc(p("abc")), tr => tr.delete(2, 3))
// Remapping the data of a deletion-only set changes it at the deletion point.
same(set.changedRange(set.map(() => "b")), {from: 2, to: 2})
})
})

211
node_modules/prosemirror-changeset/test/test-changes.ts generated vendored Normal file
View File

@@ -0,0 +1,211 @@
import ist from "ist"
import {schema, doc, p, blockquote, h1} from "prosemirror-test-builder"
import {Transform} from "prosemirror-transform"
import {Node} from "prosemirror-model"
import {ChangeSet} from "prosemirror-changeset"
// Core ChangeSet tests. Each case applies one or more transforms to a
// starting document and checks the resulting changes, expressed as
// [fromA, toA, fromB, toB] tuples (A = original doc, B = changed doc).
// See `find` below for how the builder/expected arguments are interpreted.
describe("ChangeSet", () => {
// Single-transform cases: one chained transform added in one addSteps call.
it("finds a single insertion",
find(doc(p("hello")), tr => tr.insert(3, t("XY")), [[3, 3, 3, 5]]))
it("finds a single deletion",
find(doc(p("hello")), tr => tr.delete(3, 5), [[3, 5, 3, 3]]))
it("identifies a replacement",
find(doc(p("hello")), tr => tr.replaceWith(3, 5, t("juj")),
[[3, 5, 3, 6]]))
it("merges adjacent canceling edits",
find(doc(p("hello")),
tr => tr.delete(3, 5).insert(3, t("ll")),
[]))
it("doesn't crash when cancelling edits are followed by others",
find(doc(p("hello")),
tr => tr.delete(2, 3).insert(2, t("e")).delete(5, 6),
[[5, 6, 5, 5]]))
it("stops handling an inserted span after collapsing it",
find(doc(p("abcba")), tr => tr.insert(2, t("b")).insert(6, t("b")).delete(3, 6),
[[3, 4, 3, 3]]))
it("partially merges insert at start",
find(doc(p("helLo")), tr => tr.delete(3, 5).insert(3, t("l")),
[[4, 5, 4, 4]]))
it("partially merges insert at end",
find(doc(p("helLo")), tr => tr.delete(3, 5).insert(3, t("L")),
[[3, 4, 3, 3]]))
it("partially merges delete at start",
find(doc(p("abc")), tr => tr.insert(3, t("xyz")).delete(3, 4),
[[3, 3, 3, 5]]))
it("partially merges delete at end",
find(doc(p("abc")), tr => tr.insert(3, t("xyz")).delete(5, 6),
[[3, 3, 3, 5]]))
it("finds multiple insertions",
find(doc(p("abc")), tr => tr.insert(1, t("x")).insert(5, t("y")),
[[1, 1, 1, 2], [4, 4, 5, 6]]))
it("finds multiple deletions",
find(doc(p("xyz")), tr => tr.delete(1, 2).delete(2, 3),
[[1, 2, 1, 1], [3, 4, 2, 2]]))
it("identifies a deletion between insertions",
find(doc(p("zyz")), tr => tr.insert(2, t("A")).insert(4, t("B")).delete(3, 4),
[[2, 3, 2, 4]]))
// Multi-call cases: an array of builders means each transform is added in
// its own addSteps call, exercising incremental change-set maintenance.
it("can add a deletion in a new addStep call", find(doc(p("hello")), [
tr => tr.delete(1, 2),
tr => tr.delete(2, 3)
], [[1, 2, 1, 1], [3, 4, 2, 2]]))
it("merges delete/insert from different addStep calls", find(doc(p("hello")), [
tr => tr.delete(3, 5),
tr => tr.insert(3, t("ll"))
], []))
it("revert a deletion by inserting the character again", find(doc(p("bar")), [
tr => tr.delete(2, 3), // br
tr => tr.insert(2, t("x")), // bxr
tr => tr.insert(2, t("a")) // baxr
], [[3, 3, 3, 4]]))
it("insert character before changed character", find(doc(p("bar")), [
tr => tr.delete(2, 3), // br
tr => tr.insert(2, t("x")), // bxr
tr => tr.insert(2, t("x")) // bxxr
], [[2, 3, 2, 4]]))
it("partially merges delete/insert from different addStep calls", find(doc(p("heljo")), [
tr => tr.delete(3, 5),
tr => tr.insert(3, t("ll"))
], [[4, 5, 4, 5]]))
it("merges insert/delete from different addStep calls", find(doc(p("ok")), [
tr => tr.insert(2, t("--")),
tr => tr.delete(2, 4)
], []))
it("partially merges insert/delete from different addStep calls", find(doc(p("ok")), [
tr => tr.insert(2, t("--")),
tr => tr.delete(2, 3)
], [[2, 2, 2, 3]]))
it("maps deletions forward", find(doc(p("foobar")), [
tr => tr.delete(5, 6),
tr => tr.insert(1, t("OKAY"))
], [[1, 1, 1, 5], [5, 6, 9, 9]]))
it("can incrementally undo then redo", find(doc(p("bar")), [
tr => tr.delete(2, 3),
tr => tr.insert(2, t("a")),
tr => tr.delete(2, 3)
], [[2, 3, 2, 2]]))
it("can map through complicated changesets", find(doc(p("12345678901234")), [
tr => tr.delete(9, 12).insert(6, t("xyz")).replaceWith(2, 3, t("uv")),
tr => tr.delete(14, 15).insert(13, t("90")).delete(8, 9)
], [[2, 3, 2, 4], [6, 6, 7, 9], [11, 12, 14, 14], [13, 14, 15, 15]]))
it("computes a proper diff of the changes",
find(doc(p("abcd"), p("efgh")), tr => tr.delete(2, 10).insert(2, t("cdef")),
[[2, 3, 2, 2], [5, 7, 4, 4], [9, 10, 6, 6]]))
it("handles re-adding content step by step", find(doc(p("one two three")), [
tr => tr.delete(1, 14),
tr => tr.insert(1, t("two")),
tr => tr.insert(4, t(" ")),
tr => tr.insert(5, t("three"))
], [[1, 5, 1, 1]]))
// The remaining cases pass a `sep` argument (trailing boolean or array),
// so the expected tuples also carry deleted/inserted span metadata as
// [length, data] pairs — see the `owner` handling in `find`.
it("doesn't get confused by split deletions", find(doc(blockquote(h1("one"), p("two four"))), [
tr => tr.delete(7, 11),
tr => tr.replaceWith(0, 13, blockquote(h1("one"), p("four")))
], [[7, 11, 7, 7, [[4, 0]], []]], true))
it("doesn't get confused by multiply split deletions", find(doc(blockquote(h1("one"), p("two three"))), [
tr => tr.delete(14, 16),
tr => tr.delete(7, 11),
tr => tr.delete(3, 5),
tr => tr.replaceWith(0, 10, blockquote(h1("o"), p("thr")))
], [[3, 5, 3, 3, [[2, 2]], []], [8, 12, 6, 6, [[3, 1], [1, 3]], []],
[14, 16, 8, 8, [[2, 0]], []]], true))
it("won't lose the order of overlapping changes", find(doc(p("12345")), [
tr => tr.delete(4, 5),
tr => tr.replaceWith(2, 2, t("a")),
tr => tr.delete(1, 6),
tr => tr.replaceWith(1, 1, t("1a235"))
], [[2, 2, 2, 3, [], [[1, 1]]], [4, 5, 5, 5, [[1, 0]], []]], [0, 0, 1, 1]))
it("properly maps deleted positions", find(doc(p("jTKqvPrzApX")), [
tr => tr.delete(8, 11),
tr => tr.replaceWith(1, 1, t("MPu")),
tr => tr.delete(2, 12),
tr => tr.replaceWith(2, 2, t("PujTKqvPrX"))
], [[1, 1, 1, 4, [], [[3, 2]]], [8, 11, 11, 11, [[3, 1]], []]], [1, 2, 2, 2]))
// Regression cases recorded from fuzz testing; the data values and
// expected metadata are exact captures and must not be altered.
it("fuzz issue 1", find(doc(p("hzwiKqBPzn")), [
tr => tr.delete(3, 7),
tr => tr.replaceWith(5, 5, t("LH")),
tr => tr.replaceWith(6, 6, t("uE")),
tr => tr.delete(1, 6),
tr => tr.delete(3, 6)
], [[1, 11, 1, 3, [[2, 1], [4, 0], [2, 1], [2, 0]], [[2, 0]]]], [0, 1, 0, 1, 0]))
it("fuzz issue 2", find(doc(p("eAMISWgauf")), [
tr => tr.delete(5, 10),
tr => tr.replaceWith(5, 5, t("KkM")),
tr => tr.replaceWith(3, 3, t("UDO")),
tr => tr.delete(1, 12),
tr => tr.replaceWith(1, 1, t("eAUDOMIKkMf")),
tr => tr.delete(5, 8),
tr => tr.replaceWith(3, 3, t("qX"))
], [[3, 10, 3, 10, [[2, 0], [5, 2]], [[7, 0]]]], [2, 0, 0, 0, 0, 0, 0]))
it("fuzz issue 3", find(doc(p("hfxjahnOuH")), [
tr => tr.delete(1, 5),
tr => tr.replaceWith(3, 3, t("X")),
tr => tr.delete(1, 8),
tr => tr.replaceWith(1, 1, t("ahXnOuH")),
tr => tr.delete(2, 4),
tr => tr.replaceWith(2, 2, t("tn")),
tr => tr.delete(5, 7),
tr => tr.delete(1, 6),
tr => tr.replaceWith(1, 1, t("atnnH")),
tr => tr.delete(2, 6)
], [[1, 11, 1, 2, [[4, 1], [1, 0], [1, 1], [1, 0], [2, 1], [1, 0]], [[1, 0]]]], [1, 0, 1, 1, 1, 1, 1, 0, 0, 0]))
it("correctly handles steps with multiple map entries", find(doc(p()), [
tr => tr.replaceWith(1, 1, t("ab")),
tr => tr.wrap(tr.doc.resolve(1).blockRange()!, [{type: schema.nodes.blockquote}])
], [[0, 0, 0, 1], [1, 1, 2, 4], [2, 2, 5, 6]]))
})
/**
 * Produce a mocha test body. `build` is one transform builder or an array of
 * them; each builder's steps are added to the change set in a separate
 * addSteps call. The set's resulting changes are serialized to
 * [fromA, toA, fromB, toB] tuples and compared against `changes`.
 *
 * `sep` controls the per-call data tag: falsy → 0 for every call, an array →
 * sep[i] per call, true → the call index. When `sep` is given and the
 * expected tuples have more than four entries, deleted/inserted span
 * metadata ([length, data] pairs) is included in the comparison as well.
 */
function find(doc: Node, build: ((tr: Transform) => void) | ((tr: Transform) => void)[],
              changes: any[], sep?: number[] | boolean) {
  return () => {
    const builders = Array.isArray(build) ? build : [build]
    let set = ChangeSet.create(doc)
    let curDoc = doc
    for (let i = 0; i < builders.length; i++) {
      const tr = new Transform(curDoc)
      builders[i](tr)
      const tag = !sep ? 0 : Array.isArray(sep) ? sep[i] : i
      set = set.addSteps(tr.doc, tr.mapping.maps, tag)
      curDoc = tr.doc
    }
    // Only compare span metadata when the caller opted in via `sep` and the
    // expected data actually carries it.
    const owner = sep && changes.length && changes[0].length > 4
    const actual = set.changes.map(ch => {
      const range: any[] = [ch.fromA, ch.toA, ch.fromB, ch.toB]
      if (owner) {
        range.push(ch.deleted.map(d => [d.length, d.data]))
        range.push(ch.inserted.map(d => [d.length, d.data]))
      }
      return range
    })
    ist(JSON.stringify(actual), JSON.stringify(changes))
  }
}
function t(str: string) { return schema.text(str) }

69
node_modules/prosemirror-changeset/test/test-diff.ts generated vendored Normal file
View File

@@ -0,0 +1,69 @@
import ist from "ist"
import {doc, p, em, strong, h1, h2} from "prosemirror-test-builder"
import {Node} from "prosemirror-model"
import {Span, Change, ChangeSet} from "prosemirror-changeset"
// computeDiff is exposed as a static property on ChangeSet.
const {computeDiff} = ChangeSet
// Tests for the fragment-level diff algorithm. Each case diffs two documents
// wrapped in a single whole-document Change and asserts the resulting
// [fromA, toA, fromB, toB] ranges; no ranges means the docs diff as equal.
describe("computeDiff", () => {
function test(doc1: Node, doc2: Node, ...ranges: number[][]) {
let diff = computeDiff(doc1.content, doc2.content,
new Change(0, doc1.content.size, 0, doc2.content.size,
[new Span(doc1.content.size, 0)],
[new Span(doc2.content.size, 0)]))
ist(JSON.stringify(diff.map(r => [r.fromA, r.toA, r.fromB, r.toB])), JSON.stringify(ranges))
}
it("returns an empty diff for identical documents", () =>
test(doc(p("foo"), p("bar")), doc(p("foo"), p("bar"))))
it("finds single-letter changes", () =>
test(doc(p("foo"), p("bar")), doc(p("foa"), p("bar")),
[3, 4, 3, 4]))
it("finds simple structure changes", () =>
test(doc(p("foo"), p("bar")), doc(p("foobar")),
[4, 6, 4, 4]))
it("finds multiple changes", () =>
test(doc(p("foo"), p("---bar")), doc(p("fgo"), p("---bur")),
[2, 4, 2, 4], [10, 11, 10, 11]))
it("ignores single-letter unchanged parts", () =>
test(doc(p("abcdef")), doc(p("axydzf")), [2, 6, 2, 6]))
it("ignores matching substrings in longer diffs", () =>
test(doc(p("One two three")), doc(p("One"), p("And another long paragraph that has wo and ee in it")),
[4, 14, 4, 57]))
it("finds deletions", () =>
test(doc(p("abc"), p("def")), doc(p("ac"), p("d")),
[2, 3, 2, 2], [7, 9, 6, 6]))
// Inline marks and node attributes are ignored by the diff.
it("ignores marks", () =>
test(doc(p("abc")), doc(p(em("a"), strong("bc")))))
it("ignores marks in diffing", () =>
test(doc(p("abcdefghi")), doc(p(em("x"), strong("bc"), "defgh", em("y"))),
[1, 2, 1, 2], [9, 10, 9, 10]))
it("ignores attributes", () =>
test(doc(h1("x")), doc(h2("x"))))
// Large inputs exercise the algorithm's handling of long unchanged runs.
it("finds huge deletions", () => {
let xs = "x".repeat(200), bs = "b".repeat(20)
test(doc(p("a" + bs + "c")), doc(p("a" + xs + bs + xs + "c")),
[2, 2, 2, 202], [22, 22, 222, 422])
})
it("finds huge insertions", () => {
let xs = "x".repeat(200), bs = "b".repeat(20)
test(doc(p("a" + xs + bs + xs + "c")), doc(p("a" + bs + "c")),
[2, 202, 2, 2], [222, 422, 22, 22])
})
it("can handle ambiguous diffs", () =>
test(doc(p("abcbcd")), doc(p("abcd")), [4, 6, 4, 4]))
it("sees the difference between different closing tokens", () =>
test(doc(p("a")), doc(h1("oo")), [0, 3, 0, 4]))
})

56
node_modules/prosemirror-changeset/test/test-merge.ts generated vendored Normal file
View File

@@ -0,0 +1,56 @@
import ist from "ist"
import {Change, Span} from "prosemirror-changeset"
// Tests for Change.merge: composing a first list of changes with a second
// list (whose A-side coordinates live in the first list's B-side document).
// Tuples are [fromA, toA, fromB, toB]; see `test` below for the comparison.
describe("mergeChanges", () => {
it("can merge simple insertions", () => test(
[[1, 1, 1, 2]], [[1, 1, 1, 2]], [[1, 1, 1, 3]]
))
it("can merge simple deletions", () => test(
[[1, 2, 1, 1]], [[1, 2, 1, 1]], [[1, 3, 1, 1]]
))
it("can merge insertion before deletion", () => test(
[[2, 3, 2, 2]], [[1, 1, 1, 2]], [[1, 1, 1, 2], [2, 3, 3, 3]]
))
it("can merge insertion after deletion", () => test(
[[2, 3, 2, 2]], [[2, 2, 2, 3]], [[2, 3, 2, 3]]
))
it("can merge deletion before insertion", () => test(
[[2, 2, 2, 3]], [[1, 2, 1, 1]], [[1, 2, 1, 2]]
))
it("can merge deletion after insertion", () => test(
[[2, 2, 2, 3]], [[3, 4, 3, 3]], [[2, 3, 2, 3]]
))
// Deleting exactly what was inserted should cancel out to no change.
it("can merge deletion of insertion", () => test(
[[2, 2, 2, 3]], [[2, 3, 2, 2]], []
))
it("can merge insertion after replace", () => test(
[[2, 3, 2, 3]], [[3, 3, 3, 4]], [[2, 3, 2, 4]]
))
it("can merge insertion before replace", () => test(
[[2, 3, 2, 3]], [[2, 2, 2, 3]], [[2, 3, 2, 4]]
))
it("can merge replace after insert", () => test(
[[2, 2, 2, 3]], [[2, 3, 2, 3]], [[2, 2, 2, 3]]
))
})
/**
 * Build a Change from a tuple of positions: [fromA, toA] (B-side mirrors
 * the A-side) or [fromA, toA, fromB, toB]. Both sides get a single Span
 * covering the whole range, tagged with `author`.
 */
function range(array: number[], author = 0) {
  const fromA = array[0], toA = array[1]
  const hasB = array.length > 2
  const fromB = hasB ? array[2] : fromA
  const toB = hasB ? array[3] : toA
  const deleted = [new Span(toA - fromA, author)]
  const inserted = [new Span(toB - fromB, author)]
  return new Change(fromA, toA, fromB, toB, deleted, inserted)
}
/**
 * Merge two change lists (given as number tuples, see `range`) and assert
 * that the merged changes' positions equal `expected`. Span data values are
 * combined with the identity function.
 */
function test(changeA: number[][], changeB: number[][], expected: number[][]) {
  const merged = Change.merge(changeA.map(range), changeB.map(range), a => a)
  const positions = merged.map(r => [r.fromA, r.toA, r.fromB, r.toB])
  ist(JSON.stringify(positions), JSON.stringify(expected))
}

View File

@@ -0,0 +1,71 @@
import ist from "ist"
import {doc, p, img} from "prosemirror-test-builder"
import {Node} from "prosemirror-model"
import {simplifyChanges, Change, Span} from "prosemirror-changeset"
// Tests for simplifyChanges, which expands/merges changes so that partial
// word replacements become whole-word changes. Two-number tuples compare
// only B-side positions; four-number tuples compare both sides (see `test`).
describe("simplifyChanges", () => {
// Pure insertions and pure deletions are left untouched.
it("doesn't change insertion-only changes", () => test(
[[1, 1, 1, 2], [2, 2, 3, 4]], doc(p("hello")), [[1, 1, 1, 2], [2, 2, 3, 4]]))
it("doesn't change deletion-only changes", () => test(
[[1, 2, 1, 1], [3, 4, 2, 2]], doc(p("hello")), [[1, 2, 1, 1], [3, 4, 2, 2]]))
it("doesn't change single-letter-replacements", () => test(
[[1, 2, 1, 2]], doc(p("hello")), [[1, 2, 1, 2]]))
it("does expand multiple-letter replacements", () => test(
[[2, 4, 2, 4]], doc(p("hello")), [[1, 6, 1, 6]]))
it("does combine changes within the same word", () => test(
[[1, 3, 1, 1], [5, 5, 3, 4]], doc(p("hello")), [[1, 7, 1, 6]]))
it("expands changes to cover full words", () => test(
[[7, 10]], doc(p("one two three four")), [[5, 14]]))
it("doesn't expand across non-word text", () => test(
[[7, 10]], doc(p("one two ----- four")), [[5, 10]]))
it("treats leaf nodes as non-words", () => test(
[[2, 3], [6, 7]], doc(p("one", img(), "two")), [[2, 3], [6, 7]]))
it("treats node boundaries as non-words", () => test(
[[2, 3], [7, 8]], doc(p("one"), p("two")), [[2, 3], [7, 8]]))
it("can merge stretches of changes", () => test(
[[2, 3], [4, 6], [8, 10], [15, 16]], doc(p("foo bar baz bug ugh")), [[1, 12], [15, 16]]))
it("handles realistic word updates", () => test(
[[8, 8, 8, 11], [10, 15, 13, 17]], doc(p("chonic condition")), [[8, 15, 8, 17]]))
it("works when after significant content", () => test(
[[63, 80, 63, 83]], doc(p("one long paragraph -----"), p("two long paragraphs ------"), p("a vote against the government")),
[[62, 81, 62, 84]]))
it("joins changes that grow together when simplifying", () => test(
[[1, 5, 1, 5], [7, 13, 7, 9], [20, 21, 16, 16]], doc(p('and his co-star')),
[[1, 13, 1, 9], [20, 21, 16, 16]]))
// When changes from different authors are merged, the deleted/inserted
// span lists must keep per-author lengths and data.
it("properly fills in metadata", () => {
let simple = simplifyChanges([range([2, 3], 0), range([4, 6], 1), range([8, 9, 8, 8], 2)],
doc(p("1234567890")))
ist(simple.length, 1)
ist(JSON.stringify(simple[0].deleted.map(s => [s.length, s.data])),
JSON.stringify([[3, 0], [4, 1], [4, 2]]))
ist(JSON.stringify(simple[0].inserted.map(s => [s.length, s.data])),
JSON.stringify([[3, 0], [4, 1], [3, 2]]))
})
})
/**
 * Build a Change from a tuple of positions: [fromA, toA] (B-side mirrors
 * the A-side) or [fromA, toA, fromB, toB]. Both sides get a single Span
 * covering the whole range, tagged with `author`.
 */
function range(array: number[], author = 0) {
  const fromA = array[0], toA = array[1]
  const hasB = array.length > 2
  const fromB = hasB ? array[2] : fromA
  const toB = hasB ? array[3] : toA
  const deleted = [new Span(toA - fromA, author)]
  const inserted = [new Span(toB - fromB, author)]
  return new Change(fromA, toA, fromB, toB, deleted, inserted)
}
function test(changes: number[][], doc: Node, result: number[][]) {
let ranges = changes.map(range)
ist(JSON.stringify(simplifyChanges(ranges, doc).map((r, i) => {
if (result[i] && result[i].length > 2) return [r.fromA, r.toA, r.fromB, r.toB]
else return [r.fromB, r.toB]
})), JSON.stringify(result))
}