Compare commits

...

4 Commits

Author SHA1 Message Date
henk717 685ec3237b
Merge pull request #158 from VE-FORBRYDERNE/tokenizer
Fix tokenizer selection code
2022-09-26 21:34:26 +02:00
henk717 39bd02a40e
Merge pull request #157 from VE-FORBRYDERNE/patch
Fix `|` character sometimes appearing in editor
2022-09-26 21:34:16 +02:00
vfbd 7fba1fd28a Fix tokenizer selection code 2022-09-26 14:37:25 -04:00
vfbd ddc9be00d6 Attempt to fix issue where `|` appears in editor after pressing enter 2022-09-26 13:57:44 -04:00
4 changed files with 47 additions and 50 deletions

View File

@@ -1661,7 +1661,6 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Go
try:
tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
except Exception as e:
pass
try:
tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", use_fast=False)
except Exception as e:
@@ -1677,7 +1676,6 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Go
try:
tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
except Exception as e:
pass
try:
tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", use_fast=False)
except Exception as e:
@@ -1706,7 +1704,6 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Go
try:
tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
except Exception as e:
pass
try:
tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", use_fast=False)
except Exception as e:

View File

@@ -1483,16 +1483,18 @@ function chunkOnBeforeInput(event) {
if(buildChunkSetFromNodeArray(getSelectedNodes()).size === 0) {
var s = rangy.getSelection();
var r = s.getRangeAt(0);
var rand = Math.random();
if(document.queryCommandSupported && document.execCommand && document.queryCommandSupported('insertHTML')) {
document.execCommand('insertHTML', false, '<span id="_EDITOR_SENTINEL_">|</span>');
document.execCommand('insertHTML', false, '<span id="_EDITOR_SENTINEL_' + rand + '_">|</span>');
} else {
var t = document.createTextNode('|');
var b = document.createElement('span');
b.id = "_EDITOR_SENTINEL_";
b.id = "_EDITOR_SENTINEL_" + rand + "_";
b.insertNode(t);
r.insertNode(b);
}
var sentinel = document.getElementById("_EDITOR_SENTINEL_");
setTimeout(function() {
var sentinel = document.getElementById("_EDITOR_SENTINEL_" + rand + "_");
if(sentinel.nextSibling && sentinel.nextSibling.tagName === "CHUNK") {
r.selectNodeContents(sentinel.nextSibling);
r.collapse(true);
@@ -1503,6 +1505,7 @@ function chunkOnBeforeInput(event) {
s.removeAllRanges();
s.addRange(r);
sentinel.parentNode.removeChild(sentinel);
}, 1);
}
}

View File

@@ -17,7 +17,7 @@
<script src="static/bootstrap.min.js"></script>
<script src="static/bootstrap-toggle.min.js"></script>
<script src="static/rangy-core.min.js"></script>
<script src="static/application.js?ver=1.18.1a"></script>
<script src="static/application.js?ver=1.18.1d"></script>
</head>
<body>
<input type="file" id="remote-save-select" accept="application/json" style="display:none">

View File

@@ -1334,7 +1334,6 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
try:
tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
except Exception as e:
pass
try:
tokenizer = AutoTokenizer.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache", use_fast=False)
except Exception as e:
@@ -1350,7 +1349,6 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
try:
tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
except Exception as e:
pass
try:
tokenizer = AutoTokenizer.from_pretrained("models/{}".format(vars.model.replace('/', '_')), revision=vars.revision, cache_dir="cache", use_fast=False)
except Exception as e:
@@ -1366,7 +1364,6 @@ def load_model(path: str, driver_version="tpu_driver0.1_dev20210607", hf_checkpo
try:
tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache")
except Exception as e:
pass
try:
tokenizer = AutoTokenizer.from_pretrained(vars.model, revision=vars.revision, cache_dir="cache", use_fast=False)
except Exception as e: