Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/custom.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"Caiyun(Token Required)": {"url": "https://fanyi.caiyunapp.com/", "key_edit": true, "secret_edit": false, "is_queue": true, "target": "caiyun.target.rst", "source": "caiyun.source.rst", "file_name": "_caiyun.py"}, "Baidu(Token Required)": {"url": "https://fanyi-api.baidu.com/", "key_edit": true, "secret_edit": true, "is_queue": false, "target": "baidu.target.rst", "source": "baidu.source.rst", "file_name": "_baidu.py"}, "Deepl(Token Required)": {"url": "https://www.deepl.com/account/?utm_source=github&utm_medium=github-python-readme", "key_edit": true, "secret_edit": false, "is_queue": true, "target": "deepl.target.rst", "source": "deepl.source.rst", "file_name": "_deepl.py"}, "OpenAI(Token Required)": {"url": "https://platform.openai.com/api-keys", "key_edit": true, "secret_edit": false, "is_queue": true, "target": "openai.target.rst", "source": "openai.source.rst", "file_name": "_openai.py"}}
{"Caiyun(Token Required)": {"url": "https://fanyi.caiyunapp.com/", "key_edit": true, "secret_edit": false, "is_queue": true, "target": "caiyun.target.rst", "source": "caiyun.source.rst", "file_name": "_caiyun.py"}, "Baidu(Token Required)": {"url": "https://fanyi-api.baidu.com/", "key_edit": true, "secret_edit": true, "is_queue": false, "target": "baidu.target.rst", "source": "baidu.source.rst", "file_name": "_baidu.py"}, "Deepl(Token Required)": {"url": "https://www.deepl.com/account/?utm_source=github&utm_medium=github-python-readme", "key_edit": true, "secret_edit": false, "is_queue": true, "target": "deepl.target.rst", "source": "deepl.source.rst", "file_name": "_deepl.py"}, "OpenAI(Token Required)": {"url": "https://platform.openai.com/api-keys", "key_edit": true, "secret_edit": false, "is_queue": true, "target": "openai.target.rst", "source": "openai.source.rst", "file_name": "_openai.py"}, "Gemini": {"url": "", "key_edit": true, "secret_edit": false, "is_queue": true, "target": "openai.target.rst", "source": "openai.source.rst", "file_name": "_gemini.py"}}
122 changes: 122 additions & 0 deletions src/custom_engine/_gemini.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import threading
import json
import os
import io
import time
import concurrent.futures
import traceback
import requests

# Shared mutable module state, configured by the host application before
# translation runs; `lock` guards concurrent access where it matters.
limit_time_span_dic = dict()  # per-key timestamps for client-side rate limiting — not used in this module's visible code
lock = threading.Lock()
count = 0
api_key = ""  # fallback Gemini API key, used when the caller passes an empty app_key
rpm = 0  # requests-per-minute limit — TODO confirm where this is enforced
rps = 0  # requests-per-second limit — TODO confirm where this is enforced
tpm = 0  # tokens-per-minute limit — TODO confirm where this is enforced
model = ""  # Gemini model name; translate_queue falls back to 'gemini-2.0-flash' when empty
base_url = ""
proxies = None  # requests-style proxies mapping, or None
time_out = 0  # HTTP timeout in seconds; translate_queue falls back to 120 when unset
max_length = 0
gemini_template_file = 'openai_template.json'  # NOTE(review): reuses the OpenAI prompt template — confirm intentional


def translate_queue(app_key, app_secret, source, target, proxies, q):
    """Translate the strings in *q* from *source* to *target* via the Gemini
    ``generateContent`` REST API.

    Args:
        app_key: API key entered in the UI; falls back to the module-level
            ``api_key`` when empty.
        app_secret: Unused; kept for the shared custom-engine signature.
        source: Source-language identifier substituted into the prompt template.
        target: Target-language identifier substituted into the prompt template.
        proxies: ``requests``-style proxies mapping, or ``None``.
        q: List of strings to translate.

    Returns:
        A list of ``{'untranslatedText': ..., 'translatedText': ...}`` dicts,
        or an empty list on any failure (template error, HTTP error, or bad
        response payload). Always returns a list so callers can iterate safely.
    """
    global gemini_template_file

    def translate_gemini_batch(api_key_to_use, source_lang, target_lang, proxy_settings, text_list):
        global model, time_out
        # Fall back to sane defaults when the UI did not configure them.
        if not model:
            model = 'gemini-2.0-flash'
        if not time_out or time_out <= 0:
            time_out = 120

        gemini_api_url = f"https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={api_key_to_use}"

        # Index every string so the model can return a JSON object that maps
        # back onto the original order.
        ori_dic = {str(i): text for i, text in enumerate(text_list)}
        json_to_translate = json.dumps(ori_dic, ensure_ascii=False)

        try:
            with io.open(gemini_template_file, 'r', encoding='utf-8') as f:
                template_content = f.read()
            # BUGFIX: use this function's own language parameters instead of
            # silently reaching into the enclosing scope's `source`/`target`,
            # which left `source_lang`/`target_lang` dead.
            template_content = template_content.replace('#SOURCE_LANGUAGE_ID!@$^#', source_lang)
            template_content = template_content.replace('#TARGET_LANGAUGE_ID!@$^#', target_lang)
            messages = json.loads(template_content)
        except Exception as e:
            print(f"Error reading or parsing {gemini_template_file}: {e}")
            return []

        if not messages:
            print(f'{gemini_template_file} is not a valid json template or is empty.')
            return []

        # Flatten the chat-style template into a single prompt string, then
        # inject the JSON blob awaiting translation.
        full_prompt_text = ""
        for message in messages:
            full_prompt_text += message.get("content", "") + "\n"
        full_prompt_text = full_prompt_text.replace('#JSON_DATA_WAITING_FOR_TRANSLATE_ID!@$^#', json_to_translate)

        payload = {
            "contents": [
                {
                    "role": "user",
                    "parts": [
                        {
                            "text": full_prompt_text
                        }
                    ]
                }
            ],
            "generationConfig": {
                "temperature": 0.5,
                "topP": 1,
                "topK": 1,
                "maxOutputTokens": 8192,
                # Ask Gemini to answer with a JSON document so the reply can
                # be parsed directly.
                "response_mime_type": "application/json"
            }
        }

        # Sentinel instead of the fragile `'response' in locals()` check.
        response = None
        try:
            response = requests.post(
                gemini_api_url,
                json=payload,
                proxies=proxy_settings,
                timeout=time_out
            )
            response.raise_for_status()

            response_json = response.json()
            content_text = response_json['candidates'][0]['content']['parts'][0]['text']
            translated_dic = json.loads(content_text)

            if len(translated_dic) != len(ori_dic):
                print("Warning: Mismatch between original and translated item count.")

            result = []
            for key, translated_text in translated_dic.items():
                # Drop any keys the model invented; keep only real originals.
                if key in ori_dic:
                    result.append({
                        'untranslatedText': ori_dic[key],
                        'translatedText': translated_text
                    })
            return result

        except Exception:
            # Dump the server response (when one was received) to aid debugging.
            if response is not None:
                print(response.status_code)
                print(response.text)
            print(traceback.format_exc())
            return []

    api_key_to_use = app_key if app_key else api_key
    if not api_key_to_use:
        print("Gemini API key is missing.")
        return []

    return translate_gemini_batch(api_key_to_use, source, target, proxies, q)
1 change: 1 addition & 0 deletions src/engine.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"engine": "Gemini", "key": "", "secret": "", "Google(Free)_key": "", "Google(Free)_secret": "", "Google(Free)": {"target": "Chinese(Simplified)", "source": "Auto Detect"}, "Gemini_key": "", "Gemini_secret": "", "rpm": "3", "rps": "3", "tpm": "40000", "openai_model": "gpt-3.5-turbo", "openai_base_url": "", "openai_model_index": 0, "time_out": "120", "max_length": "5000", "Gemini": {"target": "Chinese(Simplified)", "source": "Auto Detect"}, "tl": "Chinese"}
88 changes: 67 additions & 21 deletions src/one_key_translate_form.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from custom_engine_form import sourceDic, targetDic
from my_log import log_print
from renpy_translate import engineDic, language_header, translateThread, translate_threads, get_translated_dic, \
web_brower_export_name, rpy_info_dic, get_rpy_info, web_brower_translate, engineList
web_brower_export_name, rpy_info_dic, get_rpy_info, web_brower_translate, engineList, translate_file_single
from engine_form import MyEngineForm
from game_unpacker_form import finish_flag
from extract_runtime_form import extract_finish
Expand All @@ -36,13 +36,39 @@
from error_repair_form import repairThread
from translated_form import MyTranslatedForm

import concurrent.futures

class MyQueue(queue.Queue):
    """Queue subclass that allows a non-destructive look at the head element."""

    def peek(self):
        """Return the first queued item without consuming it.

        Raises IndexError when the queue is empty.
        """
        with self.mutex:
            head = self.queue[0]
        return head

# Limit max threads using Thread Pool, default 5
# Limit max threads using Thread Pool, default 5
class MyTranslationPoolWorker(QThread):
    """Qt worker thread that fans translation jobs out to a bounded pool.

    Emits `progress` once per completed job and `finished` when all jobs
    are done.
    """

    finished = Signal()
    progress = Signal(str)

    def __init__(self, tasks, max_workers = 5, parent = None):
        super().__init__(parent)
        self.tasks = tasks
        self.max_workers = max_workers

    def run(self):
        with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_workers) as pool:
            # Map each future back to its parameter dict for progress reporting.
            pending = {
                pool.submit(translate_file_single, **params): params
                for params in self.tasks
            }

            for done in concurrent.futures.as_completed(pending):
                params = pending[done]
                try:
                    done.result()
                except Exception as exc:
                    self.progress.emit(f"Error processing {params['p']}: {exc}")
                else:
                    self.progress.emit(f"Successfully processed: {params['p']}")

        self.finished.emit()


class MyOneKeyTranslateForm(QDialog, Ui_OneKeyTranslateDialog):
def __init__(self, parent=None):
Expand Down Expand Up @@ -102,6 +128,8 @@ def __init__(self, parent=None):
self.overwriteCheckBox.setChecked(not is_skip_if_exist)
_thread.start_new_thread(self.update, ())

self.translation_pool_worker = None

def on_tl_path_changed(self):
if os.path.isfile('engine.txt'):
json_file = open('engine.txt', 'r',encoding='utf-8')
Expand Down Expand Up @@ -226,46 +254,64 @@ def translate(self):
target_language = targetDic[self.targetComboBox.currentText()]
if self.sourceComboBox.currentText() != '':
source_language = sourceDic[self.sourceComboBox.currentText()]

# create a local thread list instead of the global one
tasks = []
for path, dir_lst, file_lst in paths:
for file_name in file_lst:
i = os.path.join(path, file_name)
if not file_name.endswith("rpy"):
continue
t = translateThread(cnt, i, target_language, source_language,
True,
False, self.local_glossary, True,
True, self.filterCheckBox_2.isChecked(), self.filterLengthLineEdit_2.text(), True)
translate_threads.append(t)

# ------------ pack into params dict -----------------
task_params = {
'p': i,
'lang_target': target_language,
'lang_source': source_language,
'is_gen_bak': False,
'local_glossary': self.local_glossary,
'is_translate_current': True,
'is_skip_translated': True,
'is_open_filter': self.filterCheckBox_2.isChecked(),
'filter_length': int(self.filterLengthLineEdit_2.text()),
'is_replace_special_symbols': True
}

tasks.append(task_params)

cnt = cnt + 1
if len(translate_threads) > 0:

translate_threads.clear()


if len(tasks) > 0:
is_finished, is_executed = self.qDic[self.translate]
is_finished = False
self.qDic[self.translate] = is_finished, is_executed
log_print('start translate...')
for t in translate_threads:
t.start()
log_print("Starting translation workers")

self.setDisabled(True)
_thread.start_new_thread(self.translate_threads_over, ())

# UI might need a update for max workers
self.translation_pool_worker = MyTranslationPoolWorker(tasks)
self.translation_pool_worker.finished.connect(self.on_translate_finished)
self.translation_pool_worker.progress.connect(log_print)
self.translation_pool_worker.start()
else:
is_finished, is_executed = self.qDic[self.translate]
is_finished = True
self.qDic[self.translate] = is_finished, is_executed


else:
is_finished, is_executed = self.qDic[self.translate]
is_finished = True
self.qDic[self.translate] = is_finished, is_executed

def translate_threads_over(self):
while True:
threads_len = len(translate_threads)
if threads_len > 0:
for t in translate_threads:
if t.is_alive():
t.join()
translate_threads.remove(t)
else:
break

    def on_translate_finished(self):
        """Slot run when the translation worker signals completion: re-enables
        the dialog and marks the translate step as finished in ``qDic``."""
        log_print('translate all complete!')
        self.setDisabled(False)
        # Flip the is_finished flag for the translate step so the one-key
        # pipeline can advance to its next stage.
        is_finished, is_executed = self.qDic[self.translate]
        is_finished = True
        self.qDic[self.translate] = is_finished, is_executed
Expand Down
Loading