{"id":36615,"date":"2025-11-17T20:27:06","date_gmt":"2025-11-17T19:27:06","guid":{"rendered":"https:\/\/www.graviton.at\/letterswaplibrary\/dataset-30-trillion-tokens-hplt-3-0-very-large-scale-multilingual-resources-for-llm-and-mt-mono-and-bi-lingual-data-multilingual-evaluation-and-pre-trained-models-oepen-et-al-2025\/"},"modified":"2025-11-17T20:27:06","modified_gmt":"2025-11-17T19:27:06","slug":"dataset-30-trillion-tokens-hplt-3-0-very-large-scale-multilingual-resources-for-llm-and-mt-mono-and-bi-lingual-data-multilingual-evaluation-and-pre-trained-models-oepen-et-al-2025","status":"publish","type":"post","link":"https:\/\/www.graviton.at\/letterswaplibrary\/dataset-30-trillion-tokens-hplt-3-0-very-large-scale-multilingual-resources-for-llm-and-mt-mono-and-bi-lingual-data-multilingual-evaluation-and-pre-trained-models-oepen-et-al-2025\/","title":{"rendered":"[Dataset] [30 Trillion Tokens] &#8220;HPLT 3.0: Very Large-Scale Multilingual Resources For LLM And MT. Mono- And Bi-lingual Data, Multilingual Evaluation, And Pre-Trained Models&#8221;, Oepen Et Al. 2025"},"content":{"rendered":"<p><!-- SC_OFF --><\/p>\n<div class=\"md\">\n<p><strong>Dataset(s)<\/strong>: <a href=\"https:\/\/hplt-project.org\/datasets\/v3.0\">https:\/\/hplt-project.org\/datasets\/v3.0<\/a><\/p>\n<p><strong>Paper<\/strong>: <a href=\"https:\/\/arxiv.org\/abs\/2511.01066\">https:\/\/arxiv.org\/abs\/2511.01066<\/a><\/p>\n<\/div>\n<p><!-- SC_ON -->   submitted by   <a href=\"https:\/\/www.reddit.com\/user\/RecmacfonD\"> \/u\/RecmacfonD <\/a> <br \/> <span><a href=\"https:\/\/www.reddit.com\/r\/datasets\/comments\/1ozorks\/dataset_30_trillion_tokens_hplt_30_very\/\">[link]<\/a><\/span>   <span><a href=\"https:\/\/www.reddit.com\/r\/datasets\/comments\/1ozorks\/dataset_30_trillion_tokens_hplt_30_very\/\">[comments]<\/a><\/span><\/p><div class='watch-action'><div class='watch-position align-right'><div class='action-like'><a class='lbg-style1 like-36615 jlk' href='javascript:void(0)' data-task='like' data-post_id='36615' data-nonce='65e0e39b87' rel='nofollow'><img class='wti-pixel' src='https:\/\/www.graviton.at\/letterswaplibrary\/wp-content\/plugins\/wti-like-post\/images\/pixel.gif' title='Like' \/><span class='lc-36615 lc'>0<\/span><\/a><\/div><\/div> <div class='status-36615 status align-right'><\/div><\/div><div class='wti-clear'><\/div>","protected":false},"excerpt":{"rendered":"<p>Dataset(s): https:\/\/hplt-project.org\/datasets\/v3.0 Paper: https:\/\/arxiv.org\/abs\/2511.01066 submitted by \/u\/RecmacfonD [link] [comments] 0<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[85],"tags":[],"class_list":["post-36615","post","type-post","status-publish","format-standard","hentry","category-datatards","wpcat-85-id"],"_links":{"self":[{"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/posts\/36615","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/comments?post=36615"}],"version-history":[{"count":0,"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/posts\/36615\/revisions"}],"wp:attachment":[{"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/media?parent=36615"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/categories?post=36615"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.graviton.at\/letterswaplibrary\/wp-json\/wp\/v2\/tags?post=36615"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}