Commit
Merge pull request #65 from anggoran/dev
release/v0.5.0
Showing 21 changed files with 136,195 additions and 38 deletions.
controllers/word.ts (new file — the API route below imports it as "../../controllers/word.ts"):
@@ -0,0 +1,14 @@
import { WordModel } from "../models/hanzi.ts";
import { supabase } from "../utils/supabase.ts";

export const getWordList = async (
  { keyword, scroll }: { keyword: string; scroll: number },
) => {
  const contentPerScroll = 10;
  const { data, count } = await supabase.rpc("word_algo", {
    search_term: keyword,
    offset_value: (scroll - 1) * contentPerScroll,
    limit_value: contentPerScroll,
  }, { count: "exact" });
  return { word: data as WordModel[], count };
};
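A minimal usage sketch of the controller (not part of the diff; the search term and import path are illustrative): it asks the "word_algo" Postgres function for one page of ten rows and returns them together with the count Supabase reports for the result.

// Illustrative only: assumes the controller above, imported from controllers/word.ts.
import { getWordList } from "./controllers/word.ts";

// Page 1 of results for the hypothetical search term "ma".
const { word, count } = await getWordList({ keyword: "ma", scroll: 1 });
console.log(word.length, count); // at most 10 WordModel rows; the island treats count < 10 as the last page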
islands/InfiniteWords.tsx (new file — the word page below imports it as "../../islands/InfiniteWords.tsx"):
@@ -0,0 +1,55 @@
import { useEffect, useRef, useState } from "preact/hooks";
import { WordModel } from "../models/hanzi.ts";

export default function InfiniteWords({ keyword }: { keyword: string }) {
  const [loading, setLoading] = useState(false);
  const [words, setWords] = useState<WordModel[]>([]);
  const [scroll, setScroll] = useState(1);
  const [hasMore, setHasMore] = useState(true);
  const loaderRef = useRef(null);

  const loadMore = async () => {
    setLoading(true);
    const { word: newWords, count } = await fetch(
      `/api/word?keyword=${keyword}&scroll=${scroll}`,
    ).then((res) => res.json());
    setWords((prevWords) => [...prevWords, ...newWords]);
    if (count < 10) setHasMore(false);
    setLoading(false);
  };

  useEffect(() => {
    if (hasMore) loadMore();
  }, [scroll]);

  useEffect(() => {
    const observer = new IntersectionObserver((entries) => {
      if (entries[0].isIntersecting && !loading) {
        setScroll((prevScroll) => prevScroll + 1);
      }
    }, { threshold: 1.0 });

    const currentLoader = loaderRef.current;

    if (currentLoader) observer.observe(currentLoader);
    return () => {
      if (currentLoader) observer.unobserve(currentLoader);
    };
  }, [loading]);

  return (
    <>
      <div className="space-y-4">
        {words.map((w) => (
          <div key={w.id} ref={loaderRef} className="p-4 border rounded">
            <p>
              <span class="font-bold">{w.hanzi}</span> {`(${w.pinyin})`}
            </p>
            <p className="text-gray-500">{w.english}</p>
          </div>
        ))}
      </div>
      {loading && <p className="text-center mt-4">Loading items...</p>}
    </>
  );
}
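For readers without the rest of the repository: the island reads only four fields of WordModel, which is defined in models/hanzi.ts and not shown in this diff. A hedged sketch of the shape it assumes, inferred from the JSX above:

// Assumed shape only; the real WordModel in models/hanzi.ts may carry more fields.
interface WordModelSketch {
  id: string | number; // used as the list key
  hanzi: string;       // the character(s), rendered in bold
  pinyin: string;      // shown in parentheses after the hanzi
  english: string;     // the gray definition line
}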
One of the changed files was deleted; its name and contents are not shown in this view.
The /api/word route handler (new file; this is the endpoint the island fetches):
@@ -0,0 +1,14 @@
import { FreshContext } from "$fresh/server.ts";
import { getWordList } from "../../controllers/word.ts";

export const handler = async (_req: Request, _ctx: FreshContext) => {
  const url = new URL(_req.url);
  const keyword = url.searchParams.get("keyword")!;
  const scroll = parseInt(url.searchParams.get("scroll")!);

  const data = await getWordList({ keyword, scroll });

  return new Response(JSON.stringify(data), {
    headers: { "Content-Type": "application/json" },
  });
};
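A hedged request/response sketch for the route (keyword and counts are illustrative): the island calls it with keyword and scroll query parameters and receives the controller's result serialized as JSON.

// Illustrative only: mirrors the fetch the InfiniteWords island performs.
const res = await fetch("/api/word?keyword=ma&scroll=2");
const { word, count } = await res.json(); // e.g. { "word": [ ...up to 10 rows... ], "count": 10 }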
The home page route (modified to add "/word" to its navigation list):
@@ -10,6 +10,7 @@ export default function Home() {
"/reading", | ||
"/writing", | ||
"/hanzi", | ||
"/word", | ||
].map((e) => ( | ||
<li> | ||
<a | ||
|
The /word page (new file; it reads the search keyword from the URL and mounts the island):
@@ -0,0 +1,32 @@
import { PageProps } from "$fresh/server.ts";
import InfiniteWords from "../../islands/InfiniteWords.tsx";

export default function WordPage(props: PageProps) {
  const keyword = props.url.searchParams.get("keyword") ?? "";
  return (
    <>
      <a href="/">Back to home</a>
      <div className="h-auto content-center bg-white">
        <div className="container mx-auto p-4">
          <h1 className="text-2xl font-bold mb-4">Word Database</h1>
          <form className="flex space-x-2 mb-4">
            <input
              type="text"
              name="keyword"
              value={keyword}
              placeholder="Search for a word..."
              className="p-2 border rounded w-full"
            />
            <button
              type="submit"
              className="px-4 py-2 bg-black text-white rounded"
            >
              Search
            </button>
          </form>
          {keyword && <InfiniteWords keyword={keyword} />}
        </div>
      </div>
    </>
  );
}
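A brief sketch of how the page and the island connect (the URL below is illustrative, not part of the diff): the form declares no method, so submitting it performs a GET back to the same route, and the keyword is read out of the request URL before being handed to the island.

// Submitting the form navigates to e.g. /word?keyword=ma, and the page re-renders with:
const url = new URL("https://example.invalid/word?keyword=ma"); // illustrative URL
const keyword = url.searchParams.get("keyword") ?? "";          // "ma" -> <InfiniteWords keyword="ma" /> mounts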
Hanzi data build script (new file; the diff view does not show its path):
@@ -0,0 +1,89 @@
import * as CSV from "jsr:@std/csv";

interface Hanzi {
  form: string;
  meaning: string;
  type: string;
  etymology: string;
}

const readPinyinCSV = async () => {
  const content = await Deno.readTextFile("./static/data/pinyin.csv");
  return CSV.parse(content).map((row) => row[1] + row[2]).slice(1);
};

const readUnihanTXT = async () => {
  const content = await Deno.readTextFile("./static/data/unihan.txt");
  return content.trim().split("\n");
};

const readCedictTXT = async () => {
  const cedictContent = await Deno.readTextFile("./static/data/cedict.txt");
  return cedictContent.split("\n");
};

const createUnihanList = async () => {
  const unihanLines = await readUnihanTXT();
  return unihanLines.map((line) => {
    const parsedLine = JSON.parse(line);
    const hasPhilosophy = parsedLine["etymology"] !== undefined;
    return {
      character: parsedLine["character"],
      definition: parsedLine["definition"],
      type: hasPhilosophy ? parsedLine["etymology"]["type"] : "",
      etymology: hasPhilosophy ? parsedLine["etymology"]["hint"] : "",
    };
  });
};

const createCedictList = async () => {
  const cedictLines = await readCedictTXT();
  return cedictLines.map((line) => {
    const parts = line.split(" ");
    const data = {
      simplified: parts[1],
      pinyin: parts[2].slice(1, -1).toLowerCase()
        .replace("u:", "v").replace("5", ""),
    };
    if (data.simplified === "儿" && data.pinyin === "r5") {
      data.pinyin = "er5";
    } else if (data.simplified === "剋" && data.pinyin === "kei1") {
      data.pinyin = "ke4";
    } else if (data.simplified === "忒" && data.pinyin === "tei1") {
      data.pinyin = "te4";
    }
    return data;
  });
};

const createHanziList = async () => {
  const pinyinList = await readPinyinCSV();
  const cedictList = await createCedictList();
  const unihanList = await createUnihanList();
  const hanziSet = new Set<string>();
  const hanziList: Hanzi[] = [];
  cedictList.forEach((cedict) => {
    const unihan = unihanList.find((e) => e.character === cedict.simplified);
    const pinyin = pinyinList.find((e) => e === cedict.pinyin);
    if (unihan && pinyin && !hanziSet.has(cedict.simplified)) {
      hanziList.push({
        form: cedict.simplified,
        meaning: unihan.definition,
        type: unihan.type,
        etymology: unihan.etymology,
      });
      hanziSet.add(cedict.simplified);
    }
  });
  return hanziList;
};

const writeHanziCSV = async () => {
  const hanziList = await createHanziList();
  const content = CSV.stringify(hanziList as unknown as CSV.DataItem[], {
    columns: ["form", "meaning", "type", "etymology"],
  });
  await Deno.writeTextFile("./static/data/hanzi.csv", content);
};

await writeHanziCSV();
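To make the CC-CEDICT parsing step above concrete, here is a hedged walk-through of a single dictionary line. The sample entry is illustrative of the standard "Traditional Simplified [pinyin] /definitions/" format and is not taken from the diff.

// Illustrative only: how createCedictList treats one CC-CEDICT line.
const line = "媽 妈 [ma1] /ma/mom/mother/";
const parts = line.split(" ");
const simplified = parts[1];                        // "妈" (parts[0] is the traditional form)
const pinyin = parts[2].slice(1, -1).toLowerCase()  // strip the surrounding brackets -> "ma1"
  .replace("u:", "v")                               // CC-CEDICT writes ü as "u:"
  .replace("5", "");                                // drop the neutral-tone digit if present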