You can view the full code here: https://pastebin.com/VsXLtucL
Install Python. Then install the following two libraries from the Command Prompt (cmd) in Windows 10:
py -m pip install google-cloud-translate
py -m pip install beautifulsoup4
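If you want to confirm that both packages installed correctly, you can check them from the same cmd window:

py -m pip show google-cloud-translate
py -m pip show beautifulsoup4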
The Python code will automatically translate the content of the following HTML tags:
<title>Your Text</title>
<meta name="description" content="Your Text"/>
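As a minimal sketch of how those two tags are picked up with BeautifulSoup (the full script below does the same thing and then passes the text to Google Translate):

from bs4 import BeautifulSoup

page = '<title>Your Text</title><meta name="description" content="Your Text"/>'
soup = BeautifulSoup(page, 'html.parser')
for title in soup.findAll('title'):
    print(title.get_text())          # text that will be sent to the translator
for meta in soup.findAll('meta', {'name': 'description'}):
    print(meta['content'])           # description text that will be translated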
The code will also automatically translate the contents (Your Text) of the following tags, but only if those tags sit between the <!-- ARTICOL START --> and <!-- ARTICOL FINAL --> HTML comments (see the position-check sketch after the tag list below). Of course, you will need to replace these tags with your own.
<!-- ARTICOL START -->
<h1 class="den_articol" itemprop="name">Your Text</h1>
<p class="text_obisnuit">Your Text</p>
<p class="text_obisnuit2">Your Text</p>
<span class="text_obisnuit2">Your Text</span>
<span class="text_obisnuit">Your Text</span>
<li class="text_obisnuit">Your Text</li>
<a class="linkMare" href="https://neculaifantanaru.com/en/">Your Text</a>
<h4 class="text_obisnuit2">Your Text</h4>
<h3 class="text_obisnuit2">Your Text</h3>
<h5 class="text_obisnuit2">Your Text</h5>
<!-- ARTICOL FINAL -->
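That position check is plain string indexing over the page source, and it is exactly what the full script does for every tag it finds (tag here stands for the element being tested):

begin_comment = str(soup).index('<!-- ARTICOL START -->')
end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
if begin_comment < str(soup).index(str(tag)) < end_comment:
    recursively_translate(tag)  # translate only what sits between the two comments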
You will need a credentials file with the .json extension (I renamed mine secret.json), which you can get from https://console.cloud.google.com/. Follow this tutorial on how to get a Google API key.
Copy the secret.json file into the same folder as your script (the code below, saved as yourcode.py, for example).
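Before running it on a whole folder, a quick way to check that secret.json is valid is a one-off test translation. This is a minimal sketch that mirrors the translate_text function in the code below; the word "Hello" and the 'zh' target code are only examples:

import os
from google.cloud import translate_v2 as translate

os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "secret.json"   # path to your credentials file
client = translate.Client()
print(client.translate("Hello", target_language="zh")["translatedText"])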
The CODE: Copy and run the code below in any Python IDE or interpreter (I use PyScripter). Don't forget to change the path on the "files_from_folder" line.
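The settings you are most likely to edit sit near the top of the script. For example, to translate the .html files from C:\test into Chinese and write the results next to the originals:

files_from_folder = r"C:\test"     # folder that contains your .html files
source_language = 'EN'
use_translate_folder = False       # True = write the output into a \translated subfolder
destination_language = 'ZH'
extension_file = ".html"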
from bs4 import BeautifulSoup
from bs4.formatter import HTMLFormatter
import requests
import json
import os
import six
from google.cloud import translate_v2 as translate


class UnsortedAttributes(HTMLFormatter):
    """Keeps the original attribute order when the HTML is written back out."""
    def attributes(self, tag):
        for k, v in tag.attrs.items():
            yield k, v


def translate_text(target, text):
    """Translates text into the target language.

    Target must be an ISO 639-1 language code.
    See https://g.co/cloud/translate/v2/translate-reference#supported_languages
    """
    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "secret.json"
    translate_client = translate.Client()
    if isinstance(text, six.binary_type):
        text = text.decode("utf-8")
    # Text can also be a sequence of strings, in which case this method
    # will return a sequence of results for each text.
    result = translate_client.translate(text, target_language=target)
    return result["translatedText"]


files_from_folder = r"C:\test"
source_language = 'EN'
use_translate_folder = False
destination_language = 'ZH'
extension_file = ".html"

directory = os.fsencode(files_from_folder)


def recursively_translate(node):
    # Walk the element tree and replace every non-empty text node with its translation.
    for x in range(len(node.contents)):
        if isinstance(node.contents[x], str):
            if node.contents[x].strip() != '':
                try:
                    newtext = translate_text(destination_language, node.contents[x])
                    node.contents[x].replaceWith(newtext)
                except:
                    pass
        elif node.contents[x] is not None:
            recursively_translate(node.contents[x])


for file in os.listdir(directory):
    filename = os.fsdecode(file)
    print(filename)
    # Skip these two files entirely.
    if filename == 'y_key_e479323ce281e459.html' or filename == 'TS_4fg4_tr78.html':
        continue
    if filename.endswith(extension_file):
        with open(os.path.join(files_from_folder, filename), encoding='utf-8') as html:
            # Wrap the page in <pre> so the original whitespace survives the round trip.
            soup = BeautifulSoup('<pre>' + html.read() + '</pre>', 'html.parser')

        for title in soup.findAll('title'):
            recursively_translate(title)

        for meta in soup.findAll('meta', {'name': 'description'}):
            try:
                newtext = translate_text(destination_language, meta['content'])
                meta['content'] = newtext
            except:
                pass

        # The remaining tags are translated only if they sit between the
        # <!-- ARTICOL START --> and <!-- ARTICOL FINAL --> comments.
        for h1 in soup.findAll('h1', {'itemprop': 'name'}, class_='den_articol'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(h1)) < end_comment:
                recursively_translate(h1)

        for p in soup.findAll('p', class_='text_obisnuit'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(p)) < end_comment:
                recursively_translate(p)

        for p in soup.findAll('p', class_='text_obisnuit2'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(p)) < end_comment:
                recursively_translate(p)

        for span in soup.findAll('span', class_='text_obisnuit2'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(span)) < end_comment:
                recursively_translate(span)

        for li in soup.findAll('li', class_='text_obisnuit'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(li)) < end_comment:
                recursively_translate(li)

        for a in soup.findAll('a', class_='linkMare'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(a)) < end_comment:
                recursively_translate(a)

        for h4 in soup.findAll('h4', class_='text_obisnuit2'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(h4)) < end_comment:
                recursively_translate(h4)

        for h5 in soup.findAll('h5', class_='text_obisnuit2'):
            begin_comment = str(soup).index('<!-- ARTICOL START -->')
            end_comment = str(soup).index('<!-- ARTICOL FINAL -->')
            if begin_comment < str(soup).index(str(h5)) < end_comment:
                recursively_translate(h5)

        print(f'{filename} translated')

        # Write the result back out, preserving the original attribute order,
        # and strip the <pre> wrapper added above (soup[5:-6]).
        soup = soup.encode(formatter=UnsortedAttributes()).decode('utf-8')
        new_filename = f'{filename.split(".")[0]}_{destination_language}.html'
        if use_translate_folder:
            try:
                with open(os.path.join(files_from_folder + r'\translated', new_filename), 'w', encoding='utf-8') as new_html:
                    new_html.write(soup[5:-6])
            except:
                os.mkdir(files_from_folder + r'\translated')
                with open(os.path.join(files_from_folder + r'\translated', new_filename), 'w', encoding='utf-8') as new_html:
                    new_html.write(soup[5:-6])
        else:
            with open(os.path.join(files_from_folder, new_filename), 'w', encoding='utf-8') as html:
                html.write(soup[5:-6])
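Once the path and the languages are set, you can also run the script straight from cmd instead of an IDE (assuming you saved it as yourcode.py in the same folder as secret.json):

py yourcode.py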
That's all folks.
Also, see VERSION 2, VERSION 3, VERSION 4, VERSION 5, VERSION 6, or VERSION 7.