Docs: add zh-CN entrypoint translations (#6300)
* Docs: add zh-CN entrypoint translations * Docs: harden docs-i18n parsing
This commit is contained in:
29
scripts/docs-i18n/glossary.go
Normal file
29
scripts/docs-i18n/glossary.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
type GlossaryEntry struct {
|
||||
Source string `json:"source"`
|
||||
Target string `json:"target"`
|
||||
}
|
||||
|
||||
func LoadGlossary(path string) ([]GlossaryEntry, error) {
|
||||
data, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
var entries []GlossaryEntry
|
||||
if err := json.Unmarshal(data, &entries); err != nil {
|
||||
return nil, fmt.Errorf("glossary parse failed: %w", err)
|
||||
}
|
||||
|
||||
return entries, nil
|
||||
}
|
||||
10
scripts/docs-i18n/go.mod
Normal file
10
scripts/docs-i18n/go.mod
Normal file
@@ -0,0 +1,10 @@
|
||||
module github.com/openclaw/openclaw/scripts/docs-i18n
|
||||
|
||||
go 1.22
|
||||
|
||||
require (
|
||||
github.com/joshp123/pi-golang v0.0.4
|
||||
github.com/yuin/goldmark v1.7.8
|
||||
golang.org/x/net v0.24.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
10
scripts/docs-i18n/go.sum
Normal file
10
scripts/docs-i18n/go.sum
Normal file
@@ -0,0 +1,10 @@
|
||||
github.com/joshp123/pi-golang v0.0.4 h1:82HISyKNN8bIl2lvAd65462LVCQIsjhaUFQxyQgg5Xk=
|
||||
github.com/joshp123/pi-golang v0.0.4/go.mod h1:9mHEQkeJELYzubXU3b86/T8yedI/iAOKx0Tz0c41qes=
|
||||
github.com/yuin/goldmark v1.7.8 h1:iERMLn0/QJeHFhxSt3p6PeN9mGnvIKSpG9YYorDMnic=
|
||||
github.com/yuin/goldmark v1.7.8/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
|
||||
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
|
||||
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
160
scripts/docs-i18n/html_translate.go
Normal file
160
scripts/docs-i18n/html_translate.go
Normal file
@@ -0,0 +1,160 @@
|
||||
package main
|
||||
|
||||
import (
	"context"
	"io"
	"sort"
	"strings"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/extension"
	"github.com/yuin/goldmark/text"
	"golang.org/x/net/html"
)
|
||||
|
||||
// htmlReplacement is one pending edit: replace the source bytes in the
// half-open span [Start, Stop) with Value.
type htmlReplacement struct {
	Start int
	Stop int
	Value string
}
|
||||
|
||||
func translateHTMLBlocks(ctx context.Context, translator *PiTranslator, body, srcLang, tgtLang string) (string, error) {
|
||||
source := []byte(body)
|
||||
r := text.NewReader(source)
|
||||
md := goldmark.New(
|
||||
goldmark.WithExtensions(extension.GFM),
|
||||
)
|
||||
doc := md.Parser().Parse(r)
|
||||
|
||||
replacements := make([]htmlReplacement, 0, 8)
|
||||
|
||||
_ = ast.Walk(doc, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
|
||||
if !entering {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
block, ok := n.(*ast.HTMLBlock)
|
||||
if !ok {
|
||||
return ast.WalkContinue, nil
|
||||
}
|
||||
start, stop, ok := htmlBlockSpan(block, source)
|
||||
if !ok {
|
||||
return ast.WalkSkipChildren, nil
|
||||
}
|
||||
htmlText := string(source[start:stop])
|
||||
translated, err := translateHTMLBlock(ctx, translator, htmlText, srcLang, tgtLang)
|
||||
if err != nil {
|
||||
return ast.WalkStop, err
|
||||
}
|
||||
replacements = append(replacements, htmlReplacement{Start: start, Stop: stop, Value: translated})
|
||||
return ast.WalkSkipChildren, nil
|
||||
})
|
||||
|
||||
if len(replacements) == 0 {
|
||||
return body, nil
|
||||
}
|
||||
|
||||
return applyHTMLReplacements(body, replacements), nil
|
||||
}
|
||||
|
||||
func htmlBlockSpan(block *ast.HTMLBlock, source []byte) (int, int, bool) {
|
||||
lines := block.Lines()
|
||||
if lines.Len() == 0 {
|
||||
return 0, 0, false
|
||||
}
|
||||
start := lines.At(0).Start
|
||||
stop := lines.At(lines.Len() - 1).Stop
|
||||
if start >= stop {
|
||||
return 0, 0, false
|
||||
}
|
||||
return start, stop, true
|
||||
}
|
||||
|
||||
func applyHTMLReplacements(body string, replacements []htmlReplacement) string {
|
||||
if len(replacements) == 0 {
|
||||
return body
|
||||
}
|
||||
sortHTMLReplacements(replacements)
|
||||
var out strings.Builder
|
||||
last := 0
|
||||
for _, rep := range replacements {
|
||||
if rep.Start < last {
|
||||
continue
|
||||
}
|
||||
out.WriteString(body[last:rep.Start])
|
||||
out.WriteString(rep.Value)
|
||||
last = rep.Stop
|
||||
}
|
||||
out.WriteString(body[last:])
|
||||
return out.String()
|
||||
}
|
||||
|
||||
func sortHTMLReplacements(replacements []htmlReplacement) {
|
||||
sort.Slice(replacements, func(i, j int) bool {
|
||||
return replacements[i].Start < replacements[j].Start
|
||||
})
|
||||
}
|
||||
|
||||
// translateHTMLBlock re-emits an HTML fragment token by token, translating
// only text nodes that sit outside "skip" elements (code/pre/script/style).
// All markup is copied through verbatim from the tokenizer's raw bytes.
func translateHTMLBlock(ctx context.Context, translator *PiTranslator, htmlText, srcLang, tgtLang string) (string, error) {
	tokenizer := html.NewTokenizer(strings.NewReader(htmlText))
	var out strings.Builder
	// Nesting depth of skip tags; text is only translated at depth 0.
	skipDepth := 0

	for {
		tt := tokenizer.Next()
		if tt == html.ErrorToken {
			// io.EOF is the normal end of input; anything else is a real error.
			if err := tokenizer.Err(); err != nil && err != io.EOF {
				return "", err
			}
			break
		}

		// Capture the raw bytes of the current token before Token() so the
		// original markup (attribute quoting, casing) is preserved exactly.
		raw := string(tokenizer.Raw())
		tok := tokenizer.Token()

		switch tt {
		case html.StartTagToken:
			out.WriteString(raw)
			if isSkipTag(strings.ToLower(tok.Data)) {
				skipDepth++
			}
		case html.EndTagToken:
			out.WriteString(raw)
			// Guard against unbalanced closing tags driving the depth negative.
			if isSkipTag(strings.ToLower(tok.Data)) && skipDepth > 0 {
				skipDepth--
			}
		case html.SelfClosingTagToken:
			out.WriteString(raw)
		case html.TextToken:
			if shouldTranslateHTMLText(skipDepth, raw) {
				translated, err := translator.Translate(ctx, raw, srcLang, tgtLang)
				if err != nil {
					return "", err
				}
				out.WriteString(translated)
			} else {
				out.WriteString(raw)
			}
		default:
			// Comments, doctypes, etc. pass through untouched.
			out.WriteString(raw)
		}
	}

	return out.String(), nil
}
|
||||
|
||||
// shouldTranslateHTMLText reports whether a text token should be sent to
// the translator: it must contain non-whitespace and must not be nested
// inside a skip tag.
func shouldTranslateHTMLText(skipDepth int, text string) bool {
	return strings.TrimSpace(text) != "" && skipDepth == 0
}
|
||||
|
||||
// isSkipTag reports whether the contents of tag must never be translated
// (code, preformatted, script, or style elements).
func isSkipTag(tag string) bool {
	return tag == "code" || tag == "pre" || tag == "script" || tag == "style"
}
|
||||
58
scripts/docs-i18n/main.go
Normal file
58
scripts/docs-i18n/main.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
func main() {
|
||||
var (
|
||||
targetLang = flag.String("lang", "zh-CN", "target language (e.g., zh-CN)")
|
||||
sourceLang = flag.String("src", "en", "source language")
|
||||
docsRoot = flag.String("docs", "docs", "docs root")
|
||||
tmPath = flag.String("tm", "", "translation memory path")
|
||||
)
|
||||
flag.Parse()
|
||||
files := flag.Args()
|
||||
if len(files) == 0 {
|
||||
fatal(fmt.Errorf("no doc files provided"))
|
||||
}
|
||||
|
||||
resolvedDocsRoot, err := filepath.Abs(*docsRoot)
|
||||
if err != nil {
|
||||
fatal(err)
|
||||
}
|
||||
|
||||
if *tmPath == "" {
|
||||
*tmPath = filepath.Join(resolvedDocsRoot, ".i18n", fmt.Sprintf("%s.tm.jsonl", *targetLang))
|
||||
}
|
||||
|
||||
glossaryPath := filepath.Join(resolvedDocsRoot, ".i18n", fmt.Sprintf("glossary.%s.json", *targetLang))
|
||||
glossary, err := LoadGlossary(glossaryPath)
|
||||
if err != nil {
|
||||
fatal(err)
|
||||
}
|
||||
|
||||
translator, err := NewPiTranslator(*sourceLang, *targetLang, glossary)
|
||||
if err != nil {
|
||||
fatal(err)
|
||||
}
|
||||
defer translator.Close()
|
||||
|
||||
tm, err := LoadTranslationMemory(*tmPath)
|
||||
if err != nil {
|
||||
fatal(err)
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
if err := processFile(context.Background(), translator, tm, resolvedDocsRoot, file, *sourceLang, *targetLang); err != nil {
|
||||
fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := tm.Save(); err != nil {
|
||||
fatal(err)
|
||||
}
|
||||
}
|
||||
131
scripts/docs-i18n/markdown_segments.go
Normal file
131
scripts/docs-i18n/markdown_segments.go
Normal file
@@ -0,0 +1,131 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/yuin/goldmark"
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/extension"
|
||||
"github.com/yuin/goldmark/text"
|
||||
)
|
||||
|
||||
// extractSegments parses body as GFM Markdown and returns its translatable
// text spans in source order. Code (inline and block) and raw HTML are
// excluded. Adjacent text nodes inside the same paragraph/heading/list item
// that are separated only by whitespace (e.g. soft line breaks) are merged
// into one segment so the translator sees whole sentences.
func extractSegments(body, relPath string) ([]Segment, error) {
	source := []byte(body)
	r := text.NewReader(source)
	md := goldmark.New(
		goldmark.WithExtensions(extension.GFM),
	)
	doc := md.Parser().Parse(r)

	segments := make([]Segment, 0, 128)
	// Nesting depth inside nodes whose text must not be translated.
	skipDepth := 0
	// Enclosing block of the most recent segment, used for merging.
	var lastBlock ast.Node

	err := ast.Walk(doc, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
		switch n.(type) {
		case *ast.CodeBlock, *ast.FencedCodeBlock, *ast.CodeSpan, *ast.HTMLBlock, *ast.RawHTML:
			// Walk visits each node on enter and on exit, so the depth
			// increments and decrements stay balanced.
			if entering {
				skipDepth++
			} else {
				skipDepth--
			}
			return ast.WalkContinue, nil
		}

		if !entering || skipDepth > 0 {
			return ast.WalkContinue, nil
		}

		textNode, ok := n.(*ast.Text)
		if !ok {
			return ast.WalkContinue, nil
		}
		block := blockParent(textNode)
		if block == nil {
			// Text outside a translatable block (paragraph/heading/list item).
			return ast.WalkContinue, nil
		}
		textValue := string(textNode.Segment.Value(source))
		if strings.TrimSpace(textValue) == "" {
			return ast.WalkContinue, nil
		}

		start := textNode.Segment.Start
		stop := textNode.Segment.Stop
		// Merge with the previous segment when both sit in the same block
		// and only whitespace separates them.
		if len(segments) > 0 && lastBlock == block {
			last := &segments[len(segments)-1]
			gap := string(source[last.Stop:start])
			if strings.TrimSpace(gap) == "" {
				last.Stop = stop
				return ast.WalkContinue, nil
			}
		}

		segments = append(segments, Segment{Start: start, Stop: stop})
		lastBlock = block
		return ast.WalkContinue, nil
	})
	if err != nil {
		return nil, err
	}

	// Second pass: materialize the text, drop whitespace-only spans, and
	// attach the stable ID/hash used by the translation memory.
	filtered := make([]Segment, 0, len(segments))
	for _, seg := range segments {
		textValue := string(source[seg.Start:seg.Stop])
		trimmed := strings.TrimSpace(textValue)
		if trimmed == "" {
			continue
		}
		textHash := hashText(textValue)
		segmentID := segmentID(relPath, textHash)
		filtered = append(filtered, Segment{
			Start:     seg.Start,
			Stop:      seg.Stop,
			Text:      textValue,
			TextHash:  textHash,
			SegmentID: segmentID,
		})
	}

	sort.Slice(filtered, func(i, j int) bool {
		return filtered[i].Start < filtered[j].Start
	})

	return filtered, nil
}
|
||||
|
||||
func blockParent(n ast.Node) ast.Node {
|
||||
for node := n.Parent(); node != nil; node = node.Parent() {
|
||||
if isTranslatableBlock(node) {
|
||||
return node
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func isTranslatableBlock(n ast.Node) bool {
|
||||
switch n.(type) {
|
||||
case *ast.Paragraph, *ast.Heading, *ast.ListItem:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func applyTranslations(body string, segments []Segment) string {
|
||||
if len(segments) == 0 {
|
||||
return body
|
||||
}
|
||||
var out strings.Builder
|
||||
last := 0
|
||||
for _, seg := range segments {
|
||||
if seg.Start < last {
|
||||
continue
|
||||
}
|
||||
out.WriteString(body[last:seg.Start])
|
||||
out.WriteString(seg.Translated)
|
||||
last = seg.Stop
|
||||
}
|
||||
out.WriteString(body[last:])
|
||||
return out.String()
|
||||
}
|
||||
89
scripts/docs-i18n/masking.go
Normal file
89
scripts/docs-i18n/masking.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
	// inlineCodeRe matches backtick-delimited inline code spans.
	inlineCodeRe = regexp.MustCompile("`[^`]+`")
	// angleLinkRe matches autolinks like <https://example.com>.
	angleLinkRe = regexp.MustCompile(`<https?://[^>]+>`)
	// linkURLRe matches [text](url); capture group 1 is the URL only.
	linkURLRe = regexp.MustCompile(`\[[^\]]*\]\(([^)]+)\)`)
	// placeholderRe matches the masking tokens this tool inserts.
	placeholderRe = regexp.MustCompile(`__OC_I18N_\d+__`)
)
|
||||
|
||||
func maskMarkdown(text string, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
|
||||
masked := maskMatches(text, inlineCodeRe, nextPlaceholder, placeholders, mapping)
|
||||
masked = maskMatches(masked, angleLinkRe, nextPlaceholder, placeholders, mapping)
|
||||
masked = maskLinkURLs(masked, nextPlaceholder, placeholders, mapping)
|
||||
return masked
|
||||
}
|
||||
|
||||
func maskMatches(text string, re *regexp.Regexp, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
|
||||
matches := re.FindAllStringIndex(text, -1)
|
||||
if len(matches) == 0 {
|
||||
return text
|
||||
}
|
||||
var out strings.Builder
|
||||
pos := 0
|
||||
for _, span := range matches {
|
||||
start, end := span[0], span[1]
|
||||
if start < pos {
|
||||
continue
|
||||
}
|
||||
out.WriteString(text[pos:start])
|
||||
placeholder := nextPlaceholder()
|
||||
mapping[placeholder] = text[start:end]
|
||||
*placeholders = append(*placeholders, placeholder)
|
||||
out.WriteString(placeholder)
|
||||
pos = end
|
||||
}
|
||||
out.WriteString(text[pos:])
|
||||
return out.String()
|
||||
}
|
||||
|
||||
func maskLinkURLs(text string, nextPlaceholder func() string, placeholders *[]string, mapping map[string]string) string {
|
||||
matches := linkURLRe.FindAllStringSubmatchIndex(text, -1)
|
||||
if len(matches) == 0 {
|
||||
return text
|
||||
}
|
||||
var out strings.Builder
|
||||
pos := 0
|
||||
for _, span := range matches {
|
||||
fullStart := span[0]
|
||||
urlStart, urlEnd := span[2], span[3]
|
||||
if urlStart < 0 || urlEnd < 0 {
|
||||
continue
|
||||
}
|
||||
if fullStart < pos {
|
||||
continue
|
||||
}
|
||||
out.WriteString(text[pos:urlStart])
|
||||
placeholder := nextPlaceholder()
|
||||
mapping[placeholder] = text[urlStart:urlEnd]
|
||||
*placeholders = append(*placeholders, placeholder)
|
||||
out.WriteString(placeholder)
|
||||
pos = urlEnd
|
||||
}
|
||||
out.WriteString(text[pos:])
|
||||
return out.String()
|
||||
}
|
||||
|
||||
// unmaskMarkdown restores masked spans by substituting each placeholder's
// original text back into the translated output.
func unmaskMarkdown(text string, placeholders []string, mapping map[string]string) string {
	result := text
	for _, ph := range placeholders {
		result = strings.ReplaceAll(result, ph, mapping[ph])
	}
	return result
}
|
||||
|
||||
// validatePlaceholders verifies the translator preserved every placeholder;
// a missing one means masked content would be lost on unmasking.
func validatePlaceholders(text string, placeholders []string) error {
	for _, ph := range placeholders {
		if strings.Contains(text, ph) {
			continue
		}
		return fmt.Errorf("placeholder missing: %s", ph)
	}
	return nil
}
|
||||
30
scripts/docs-i18n/placeholders.go
Normal file
30
scripts/docs-i18n/placeholders.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type PlaceholderState struct {
|
||||
counter int
|
||||
used map[string]struct{}
|
||||
}
|
||||
|
||||
func NewPlaceholderState(text string) *PlaceholderState {
|
||||
used := map[string]struct{}{}
|
||||
for _, hit := range placeholderRe.FindAllString(text, -1) {
|
||||
used[hit] = struct{}{}
|
||||
}
|
||||
return &PlaceholderState{counter: 900000, used: used}
|
||||
}
|
||||
|
||||
func (s *PlaceholderState) Next() string {
|
||||
for {
|
||||
candidate := fmt.Sprintf("__OC_I18N_%d__", s.counter)
|
||||
s.counter++
|
||||
if _, ok := s.used[candidate]; ok {
|
||||
continue
|
||||
}
|
||||
s.used[candidate] = struct{}{}
|
||||
return candidate
|
||||
}
|
||||
}
|
||||
205
scripts/docs-i18n/process.go
Normal file
205
scripts/docs-i18n/process.go
Normal file
@@ -0,0 +1,205 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// processFile translates a single Markdown doc end to end: frontmatter
// fields, raw HTML blocks, then the remaining Markdown text segments
// (cached through the translation memory), and writes the result to
// docsRoot/<tgtLang>/<relPath>.
func processFile(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, docsRoot, filePath, srcLang, tgtLang string) error {
	absPath, err := filepath.Abs(filePath)
	if err != nil {
		return err
	}
	relPath, err := filepath.Rel(docsRoot, absPath)
	if err != nil {
		return err
	}
	if relPath == "." || relPath == "" {
		return fmt.Errorf("file %s resolves to docs root %s", absPath, docsRoot)
	}
	// Reject paths that escape the docs root (e.g. ../outside.md).
	if filepath.IsAbs(relPath) || relPath == ".." || strings.HasPrefix(relPath, ".."+string(filepath.Separator)) {
		return fmt.Errorf("file %s not under docs root %s", absPath, docsRoot)
	}

	content, err := os.ReadFile(absPath)
	if err != nil {
		return err
	}

	frontMatter, body := splitFrontMatter(string(content))
	frontData := map[string]any{}
	if frontMatter != "" {
		if err := yaml.Unmarshal([]byte(frontMatter), &frontData); err != nil {
			return fmt.Errorf("frontmatter parse failed for %s: %w", relPath, err)
		}
	}

	// Translate known frontmatter fields (summary, read_when) in place.
	if err := translateFrontMatter(ctx, translator, tm, frontData, relPath, srcLang, tgtLang); err != nil {
		return err
	}

	// Raw HTML blocks are translated separately from Markdown text segments.
	body, err = translateHTMLBlocks(ctx, translator, body, srcLang, tgtLang)
	if err != nil {
		return err
	}

	segments, err := extractSegments(body, relPath)
	if err != nil {
		return err
	}

	namespace := cacheNamespace()
	for i := range segments {
		seg := &segments[i]
		seg.CacheKey = cacheKey(namespace, srcLang, tgtLang, seg.SegmentID, seg.TextHash)
		// Translation-memory hit: reuse the cached translation.
		if entry, ok := tm.Get(seg.CacheKey); ok {
			seg.Translated = entry.Translated
			continue
		}
		translated, err := translator.Translate(ctx, seg.Text, srcLang, tgtLang)
		if err != nil {
			return fmt.Errorf("translate failed (%s): %w", relPath, err)
		}
		seg.Translated = translated
		// Record the fresh translation so later runs hit the cache.
		entry := TMEntry{
			CacheKey:   seg.CacheKey,
			SegmentID:  seg.SegmentID,
			SourcePath: relPath,
			TextHash:   seg.TextHash,
			Text:       seg.Text,
			Translated: translated,
			Provider:   providerName,
			Model:      modelVersion,
			SrcLang:    srcLang,
			TgtLang:    tgtLang,
			UpdatedAt:  time.Now().UTC().Format(time.RFC3339),
		}
		tm.Put(entry)
	}

	translatedBody := applyTranslations(body, segments)
	// Frontmatter is re-encoded with an x-i18n provenance block; the hash is
	// of the original source bytes so staleness can be detected later.
	updatedFront, err := encodeFrontMatter(frontData, relPath, content)
	if err != nil {
		return err
	}

	outputPath := filepath.Join(docsRoot, tgtLang, relPath)
	if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil {
		return err
	}

	output := updatedFront + translatedBody
	return os.WriteFile(outputPath, []byte(output), 0o644)
}
|
||||
|
||||
// splitFrontMatter separates a leading YAML frontmatter block (delimited by
// "---" lines) from the Markdown body. It returns ("", content) when no
// complete frontmatter block is present.
func splitFrontMatter(content string) (string, string) {
	if !strings.HasPrefix(content, "---") {
		return "", content
	}
	lines := strings.Split(content, "\n")
	if len(lines) < 2 {
		return "", content
	}
	// FIX: require the opening line to be exactly "---" (modulo trailing
	// whitespace/CR). The previous bare prefix check treated lines like
	// "----" or "---title" as frontmatter openers and could strip body
	// text up to a later "---" line.
	if strings.TrimSpace(lines[0]) != "---" {
		return "", content
	}
	endIndex := -1
	for i := 1; i < len(lines); i++ {
		if strings.TrimSpace(lines[i]) == "---" {
			endIndex = i
			break
		}
	}
	if endIndex == -1 {
		return "", content
	}
	front := strings.Join(lines[1:endIndex], "\n")
	body := strings.Join(lines[endIndex+1:], "\n")
	if strings.HasPrefix(body, "\n") {
		body = body[1:]
	}
	return front, body
}
|
||||
|
||||
func encodeFrontMatter(frontData map[string]any, relPath string, source []byte) (string, error) {
|
||||
if len(frontData) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
frontData["x-i18n"] = map[string]any{
|
||||
"source_path": relPath,
|
||||
"source_hash": hashBytes(source),
|
||||
"provider": providerName,
|
||||
"model": modelVersion,
|
||||
"workflow": workflowVersion,
|
||||
"generated_at": time.Now().UTC().Format(time.RFC3339),
|
||||
}
|
||||
encoded, err := yaml.Marshal(frontData)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return fmt.Sprintf("---\n%s---\n\n", string(encoded)), nil
|
||||
}
|
||||
|
||||
func translateFrontMatter(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, data map[string]any, relPath, srcLang, tgtLang string) error {
|
||||
if len(data) == 0 {
|
||||
return nil
|
||||
}
|
||||
if summary, ok := data["summary"].(string); ok {
|
||||
translated, err := translateSnippet(ctx, translator, tm, relPath+":frontmatter:summary", summary, srcLang, tgtLang)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
data["summary"] = translated
|
||||
}
|
||||
if readWhen, ok := data["read_when"].([]any); ok {
|
||||
translated := make([]any, 0, len(readWhen))
|
||||
for idx, item := range readWhen {
|
||||
textValue, ok := item.(string)
|
||||
if !ok {
|
||||
translated = append(translated, item)
|
||||
continue
|
||||
}
|
||||
value, err := translateSnippet(ctx, translator, tm, fmt.Sprintf("%s:frontmatter:read_when:%d", relPath, idx), textValue, srcLang, tgtLang)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
translated = append(translated, value)
|
||||
}
|
||||
data["read_when"] = translated
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func translateSnippet(ctx context.Context, translator *PiTranslator, tm *TranslationMemory, segmentID, textValue, srcLang, tgtLang string) (string, error) {
|
||||
if strings.TrimSpace(textValue) == "" {
|
||||
return textValue, nil
|
||||
}
|
||||
namespace := cacheNamespace()
|
||||
textHash := hashText(textValue)
|
||||
ck := cacheKey(namespace, srcLang, tgtLang, segmentID, textHash)
|
||||
if entry, ok := tm.Get(ck); ok {
|
||||
return entry.Translated, nil
|
||||
}
|
||||
translated, err := translator.Translate(ctx, textValue, srcLang, tgtLang)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
entry := TMEntry{
|
||||
CacheKey: ck,
|
||||
SegmentID: segmentID,
|
||||
SourcePath: segmentID,
|
||||
TextHash: textHash,
|
||||
Text: textValue,
|
||||
Translated: translated,
|
||||
Provider: providerName,
|
||||
Model: modelVersion,
|
||||
SrcLang: srcLang,
|
||||
TgtLang: tgtLang,
|
||||
UpdatedAt: time.Now().UTC().Format(time.RFC3339),
|
||||
}
|
||||
tm.Put(entry)
|
||||
return translated, nil
|
||||
}
|
||||
11
scripts/docs-i18n/segment.go
Normal file
11
scripts/docs-i18n/segment.go
Normal file
@@ -0,0 +1,11 @@
|
||||
package main
|
||||
|
||||
// Segment is one translatable span of a Markdown document body.
type Segment struct {
	Start int // byte offset of the span's start in the body
	Stop int // byte offset one past the span's end
	Text string // original source text of the span
	TextHash string // hash of the whitespace-normalized text (see hashText)
	SegmentID string // stable ID: relPath plus a short text hash
	Translated string // translated text, filled in by the pipeline
	CacheKey string // translation-memory key for this segment
}
|
||||
126
scripts/docs-i18n/tm.go
Normal file
126
scripts/docs-i18n/tm.go
Normal file
@@ -0,0 +1,126 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type TMEntry struct {
|
||||
CacheKey string `json:"cache_key"`
|
||||
SegmentID string `json:"segment_id"`
|
||||
SourcePath string `json:"source_path"`
|
||||
TextHash string `json:"text_hash"`
|
||||
Text string `json:"text"`
|
||||
Translated string `json:"translated"`
|
||||
Provider string `json:"provider"`
|
||||
Model string `json:"model"`
|
||||
SrcLang string `json:"src_lang"`
|
||||
TgtLang string `json:"tgt_lang"`
|
||||
UpdatedAt string `json:"updated_at"`
|
||||
}
|
||||
|
||||
type TranslationMemory struct {
|
||||
path string
|
||||
entries map[string]TMEntry
|
||||
}
|
||||
|
||||
func LoadTranslationMemory(path string) (*TranslationMemory, error) {
|
||||
tm := &TranslationMemory{path: path, entries: map[string]TMEntry{}}
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
return tm, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
reader := bufio.NewReader(file)
|
||||
for {
|
||||
line, err := reader.ReadBytes('\n')
|
||||
if len(line) > 0 {
|
||||
trimmed := strings.TrimSpace(string(line))
|
||||
if trimmed != "" {
|
||||
var entry TMEntry
|
||||
if err := json.Unmarshal([]byte(trimmed), &entry); err != nil {
|
||||
return nil, fmt.Errorf("translation memory decode failed: %w", err)
|
||||
}
|
||||
if entry.CacheKey != "" {
|
||||
tm.entries[entry.CacheKey] = entry
|
||||
}
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
if errors.Is(err, io.EOF) {
|
||||
break
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return tm, nil
|
||||
}
|
||||
|
||||
func (tm *TranslationMemory) Get(cacheKey string) (TMEntry, bool) {
|
||||
entry, ok := tm.entries[cacheKey]
|
||||
return entry, ok
|
||||
}
|
||||
|
||||
func (tm *TranslationMemory) Put(entry TMEntry) {
|
||||
if entry.CacheKey == "" {
|
||||
return
|
||||
}
|
||||
tm.entries[entry.CacheKey] = entry
|
||||
}
|
||||
|
||||
func (tm *TranslationMemory) Save() error {
|
||||
if tm.path == "" {
|
||||
return nil
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Dir(tm.path), 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
tmpPath := tm.path + ".tmp"
|
||||
file, err := os.Create(tmpPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
keys := make([]string, 0, len(tm.entries))
|
||||
for key := range tm.entries {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
|
||||
writer := bufio.NewWriter(file)
|
||||
for _, key := range keys {
|
||||
entry := tm.entries[key]
|
||||
payload, err := json.Marshal(entry)
|
||||
if err != nil {
|
||||
_ = file.Close()
|
||||
return err
|
||||
}
|
||||
if _, err := writer.Write(payload); err != nil {
|
||||
_ = file.Close()
|
||||
return err
|
||||
}
|
||||
if _, err := writer.WriteString("\n"); err != nil {
|
||||
_ = file.Close()
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := writer.Flush(); err != nil {
|
||||
_ = file.Close()
|
||||
return err
|
||||
}
|
||||
if err := file.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.Rename(tmpPath, tm.path)
|
||||
}
|
||||
104
scripts/docs-i18n/translator.go
Normal file
104
scripts/docs-i18n/translator.go
Normal file
@@ -0,0 +1,104 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
pi "github.com/joshp123/pi-golang"
|
||||
)
|
||||
|
||||
// PiTranslator translates text through a one-shot Pi client session.
type PiTranslator struct {
	client *pi.OneShotClient
}
|
||||
|
||||
// NewPiTranslator starts a one-shot Pi session configured as a strict
// translation function for the srcLang -> tgtLang pair, with the glossary
// baked into the system prompt.
func NewPiTranslator(srcLang, tgtLang string, glossary []GlossaryEntry) (*PiTranslator, error) {
	options := pi.DefaultOneShotOptions()
	options.AppName = "openclaw-docs-i18n"
	options.Mode = pi.ModeDragons
	// Model comes from util.go's modelVersion so the translation-memory
	// cache namespace stays in sync with what actually produced the text.
	options.Dragons = pi.DragonsOptions{
		Provider: "anthropic",
		Model: modelVersion,
		Thinking: "high",
	}
	options.SystemPrompt = translationPrompt(srcLang, tgtLang, glossary)
	client, err := pi.StartOneShot(options)
	if err != nil {
		return nil, err
	}
	return &PiTranslator{client: client}, nil
}
|
||||
|
||||
func (t *PiTranslator) Translate(ctx context.Context, text, srcLang, tgtLang string) (string, error) {
|
||||
if t.client == nil {
|
||||
return "", errors.New("pi client unavailable")
|
||||
}
|
||||
prefix, core, suffix := splitWhitespace(text)
|
||||
if core == "" {
|
||||
return text, nil
|
||||
}
|
||||
state := NewPlaceholderState(core)
|
||||
placeholders := make([]string, 0, 8)
|
||||
mapping := map[string]string{}
|
||||
masked := maskMarkdown(core, state.Next, &placeholders, mapping)
|
||||
res, err := t.client.Run(ctx, masked)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
translated := strings.TrimSpace(res.Text)
|
||||
if err := validatePlaceholders(translated, placeholders); err != nil {
|
||||
return "", err
|
||||
}
|
||||
translated = unmaskMarkdown(translated, placeholders, mapping)
|
||||
return prefix + translated + suffix, nil
|
||||
}
|
||||
|
||||
func (t *PiTranslator) Close() {
|
||||
if t.client != nil {
|
||||
_ = t.client.Close()
|
||||
}
|
||||
}
|
||||
|
||||
func translationPrompt(srcLang, tgtLang string, glossary []GlossaryEntry) string {
|
||||
srcLabel := srcLang
|
||||
tgtLabel := tgtLang
|
||||
if strings.EqualFold(srcLang, "en") {
|
||||
srcLabel = "English"
|
||||
}
|
||||
if strings.EqualFold(tgtLang, "zh-CN") {
|
||||
tgtLabel = "Simplified Chinese"
|
||||
}
|
||||
glossaryBlock := buildGlossaryPrompt(glossary)
|
||||
return strings.TrimSpace(fmt.Sprintf(`You are a translation function, not a chat assistant.
|
||||
Translate from %s to %s.
|
||||
|
||||
Rules:
|
||||
- Output ONLY the translated text. No preamble, no questions, no commentary.
|
||||
- Preserve Markdown syntax exactly (headings, lists, tables, emphasis).
|
||||
- Do not translate code spans/blocks, config keys, CLI flags, or env vars.
|
||||
- Do not alter URLs or anchors.
|
||||
- Preserve placeholders exactly: __OC_I18N_####__.
|
||||
- Use neutral technical Chinese; avoid slang or jokes.
|
||||
- Keep product names in English: OpenClaw, Gateway, Pi, WhatsApp, Telegram, Discord, iMessage, Slack, Microsoft Teams, Google Chat, Signal.
|
||||
|
||||
%s
|
||||
|
||||
If the input is empty, output empty.
|
||||
If the input contains only placeholders, output it unchanged.`, srcLabel, tgtLabel, glossaryBlock))
|
||||
}
|
||||
|
||||
func buildGlossaryPrompt(glossary []GlossaryEntry) string {
|
||||
if len(glossary) == 0 {
|
||||
return ""
|
||||
}
|
||||
var lines []string
|
||||
lines = append(lines, "Preferred translations (use when natural):")
|
||||
for _, entry := range glossary {
|
||||
if entry.Source == "" || entry.Target == "" {
|
||||
continue
|
||||
}
|
||||
lines = append(lines, fmt.Sprintf("- %s -> %s", entry.Source, entry.Target))
|
||||
}
|
||||
return strings.Join(lines, "\n")
|
||||
}
|
||||
81
scripts/docs-i18n/util.go
Normal file
81
scripts/docs-i18n/util.go
Normal file
@@ -0,0 +1,81 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
workflowVersion = 9
|
||||
providerName = "pi"
|
||||
modelVersion = "claude-opus-4-5"
|
||||
)
|
||||
|
||||
func cacheNamespace() string {
|
||||
return fmt.Sprintf("wf=%d|provider=%s|model=%s", workflowVersion, providerName, modelVersion)
|
||||
}
|
||||
|
||||
func cacheKey(namespace, srcLang, tgtLang, segmentID, textHash string) string {
|
||||
raw := fmt.Sprintf("%s|%s|%s|%s|%s", namespace, srcLang, tgtLang, segmentID, textHash)
|
||||
hash := sha256.Sum256([]byte(raw))
|
||||
return hex.EncodeToString(hash[:])
|
||||
}
|
||||
|
||||
// hashText hashes text after whitespace normalization, so reflowed but
// otherwise identical source lines share one cache entry.
func hashText(text string) string {
	return hashBytes([]byte(normalizeText(text)))
}

// hashBytes returns the hex-encoded SHA-256 of data.
func hashBytes(data []byte) string {
	sum := sha256.Sum256(data)
	return hex.EncodeToString(sum[:])
}

// normalizeText collapses every run of whitespace to a single space and
// trims the ends.
func normalizeText(text string) string {
	return strings.Join(strings.Fields(text), " ")
}
|
||||
|
||||
// segmentID builds a stable human-readable ID from the doc-relative path
// and the first 16 hex characters of the text hash.
func segmentID(relPath, textHash string) string {
	short := textHash
	if len(short) > 16 {
		short = short[:16]
	}
	return relPath + ":" + short
}
|
||||
|
||||
// splitWhitespace splits text into leading whitespace, trimmed core, and
// trailing whitespace, such that prefix+core+suffix == text.
func splitWhitespace(text string) (string, string, string) {
	lo := 0
	for lo < len(text) && isWhitespace(text[lo]) {
		lo++
	}
	hi := len(text)
	for hi > lo && isWhitespace(text[hi-1]) {
		hi--
	}
	return text[:lo], text[lo:hi], text[hi:]
}

// isWhitespace reports whether b is an ASCII space, tab, LF, or CR.
func isWhitespace(b byte) bool {
	return b == ' ' || b == '\t' || b == '\n' || b == '\r'
}
|
||||
|
||||
// fatal prints err to stderr and exits non-zero; a nil error is a no-op.
func fatal(err error) {
	if err == nil {
		return
	}
	msg := err.Error() + "\n"
	// Best effort: nothing useful to do if stderr itself fails.
	_, _ = io.WriteString(os.Stderr, msg)
	os.Exit(1)
}
|
||||
Reference in New Issue
Block a user