Fixes forever #415

Merged: 7 commits, Jul 23, 2023
aiserver.py: 11 changes (9 additions & 2 deletions)
@@ -7784,9 +7784,16 @@ def UI_2_update_tokens(data):
 def UI_2_privacy_mode(data):
     if data['enabled']:
         koboldai_vars.privacy_mode = True
+        return
+
+    if data['password'] == koboldai_vars.privacy_password:
+        koboldai_vars.privacy_mode = False
     else:
-        if data['password'] == koboldai_vars.privacy_password:
-            koboldai_vars.privacy_mode = False
+        logger.warning("Watch out! Someone tried to unlock your instance with an incorrect password! Stay on your toes...")
+        show_error_notification(
+            title="Invalid password",
+            text="The password you provided was incorrect. Please try again."
+        )
 
 #==================================================================#
 # Genres
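The handler above accepts two payload shapes on the same `privacy_mode` socket event: a lock request that only carries `enabled: true`, and an unlock attempt that carries `enabled: false` together with the password typed into the lock-screen popup. Below is a minimal, self-contained sketch of that branching; the stub objects stand in for KoboldAI's real `koboldai_vars` and `show_error_notification`, the unlock payload shape is inferred from the handler, and the `logger.warning` call is left out.

```python
# Hypothetical stand-ins for illustration only; they are not the real objects.
class _Vars:
    privacy_mode = False
    privacy_password = "hunter2"

koboldai_vars = _Vars()

def show_error_notification(title, text):
    # The real function raises a toast in the UI; here we just print.
    print(f"[{title}] {text}")

def UI_2_privacy_mode(data):
    # Lock request: {'enabled': True}
    if data['enabled']:
        koboldai_vars.privacy_mode = True
        return

    # Unlock attempt: {'enabled': False, 'password': ...}
    if data['password'] == koboldai_vars.privacy_password:
        koboldai_vars.privacy_mode = False
    else:
        show_error_notification(
            title="Invalid password",
            text="The password you provided was incorrect. Please try again.",
        )

UI_2_privacy_mode({'enabled': True})                          # lock
UI_2_privacy_mode({'enabled': False, 'password': 'wrong'})    # wrong password -> notification
UI_2_privacy_mode({'enabled': False, 'password': 'hunter2'})  # unlocks
print(koboldai_vars.privacy_mode)  # False
```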
modeling/inference_models/hf_torch.py: 13 changes (2 additions & 11 deletions)
@@ -225,9 +225,6 @@ def new_get_logits_processor(*args, **kwargs) -> LogitsProcessorList:
     )
 
 class KoboldLogitsWarperList(LogitsProcessorList):
-    def __init__(self):
-        pass
-
     def __call__(
         lw_self,
         input_ids: torch.LongTensor,
@@ -244,16 +241,10 @@ def __call__(
             ), f"Scores are None; processor '{processor}' is to blame"
         return scores
 
-def new_get_logits_warper(
-    beams: int = 1,
-) -> LogitsProcessorList:
-    return KoboldLogitsWarperList()
-
 def new_sample(self, *args, **kwargs):
     assert kwargs.pop("logits_warper", None) is not None
-    kwargs["logits_warper"] = new_get_logits_warper(
-        beams=1,
-    )
+    kwargs["logits_warper"] = KoboldLogitsWarperList()
 
     if utils.koboldai_vars.newlinemode in ["s", "ns"]:
         kwargs["eos_token_id"] = -1
         kwargs.setdefault("pad_token_id", 2)
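The deleted no-op `__init__` and the `new_get_logits_warper` factory added nothing over constructing the list directly, so `new_sample` now builds a `KoboldLogitsWarperList()` inline. For context, here is a simplified sketch of the pattern this file relies on, not the repository's exact wiring: a `LogitsProcessorList` subclass chains processors, asserts none of them return `None`, and is handed to the patched `sample()` through the `logits_warper` keyword argument. The temperature warper below is only a placeholder for KoboldAI's own samplers.

```python
import torch
from transformers import LogitsProcessorList, TemperatureLogitsWarper

class KoboldLogitsWarperListSketch(LogitsProcessorList):
    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, *args, **kwargs):
        for processor in self:
            scores = processor(input_ids, scores)
            assert scores is not None, f"Scores are None; processor '{processor}' is to blame"
        return scores

# In the real code, an object like this replaces kwargs["logits_warper"] inside new_sample().
warpers = KoboldLogitsWarperListSketch([TemperatureLogitsWarper(0.8)])

dummy_ids = torch.zeros((1, 4), dtype=torch.long)
dummy_scores = torch.randn(1, 32000)
print(warpers(dummy_ids, dummy_scores).shape)  # torch.Size([1, 32000])
```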
modeling/logits_processors.py: 12 changes (11 additions & 1 deletion)
@@ -233,6 +233,8 @@ def _get_biased_tokens(self, input_ids: List) -> Dict:
             token_seqs = self._get_token_sequence(phrase)
             variant_deltas = {}
             for token_seq in token_seqs:
+                if not token_seq:
+                    continue
                 bias_index = self._find_intersection(input_ids, token_seq)
 
                 # Ensure completion after completion_threshold tokens
@@ -267,6 +269,14 @@ def __call__(
         for batch in range(scores_shape[0]):
             for token, bias in self._get_biased_tokens(input_ids[batch]).items():
-                scores[batch][token] += bias
+                if bias > 0 and bool(scores[batch][token].isneginf()):
+                    # Adding bias to -inf will do NOTHING!!! So just set it for
+                    # now. There may be more mathishly correct way to do this
+                    # but it'll work. Also, make sure the bias is actually
+                    # positive. Don't give a -inf token more chance by setting
+                    # it to -0.5!
+                    scores[batch][token] = bias
+                else:
+                    scores[batch][token] += bias
 
         return scores
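The comment in the hunk above is easy to verify: in floating-point arithmetic, adding any finite bias to `-inf` still yields `-inf`, so a token that an earlier processor banned outright would stay banned no matter how strong the phrase bias is. A small self-contained check of the behavior the new branch relies on (plain PyTorch, independent of the surrounding class):

```python
import torch

scores = torch.tensor([[0.3, float("-inf"), 1.2]])
bias = 5.0

# Adding a finite bias to -inf is still -inf, so the token would stay banned.
print(scores[0][1] + bias)  # tensor(-inf)

# Overwriting the score, as the new code does, actually re-enables the token.
if bias > 0 and bool(scores[0][1].isneginf()):
    scores[0][1] = bias
print(scores)  # tensor([[0.3000, 5.0000, 1.2000]])
```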
static/koboldai.css: 26 changes (26 additions & 0 deletions)
@@ -1457,6 +1457,30 @@ td.server_vars {
     line-height: 2;
 }
 
+/* Privacy Mode (Lock Screen) */
+#privacy_mode {
+    height: unset;
+    width: unset;
+    position: relative;
+    top: unset;
+    left: unset;
+}
+
+#privacy_mode .popup_list_area {
+    display: flex;
+    align-items: center;
+    flex-direction: column;
+    padding-top: 10px;
+    padding-bottom: 10px;
+    padding-left: 15px;
+    padding-right: 15px;
+}
+
+#privacy_mode input {
+    margin-top: 15px;
+    width: 85%;
+}
+
 /* ---------------------------- OVERALL PAGE CONFIG ------------------------------*/
 body {
     background-color: var(--background);
@@ -1962,6 +1986,7 @@ body {
     color: var(--popup_title_bar_color_text);
     text-align: center;
     font-size: calc(1.3em + var(--font_size_adjustment));
+    user-select: none;
 }
 
 .popup .action_button {
@@ -2848,6 +2873,7 @@ body {
     display: flex;
     justify-content: center;
     align-items: center;
+    pointer-events: none;
 }
 
 #welcome-text-content {
static/koboldai.js: 76 changes (56 additions & 20 deletions)
@@ -83,6 +83,7 @@ let story_id = -1;
 var dirty_chunks = [];
 var initial_socketio_connection_occured = false;
 var selected_model_data;
+var privacy_mode_enabled = false;
 
 // Each entry into this array should be an object that looks like:
 // {class: "class", key: "key", func: callback}
@@ -161,7 +162,7 @@ const shortcuts = [
     {mod: "ctrl", key: "m", desc: "Focuses Memory", func: () => focusEl("#memory")},
     {mod: "ctrl", key: "u", desc: "Focuses Author's Note", func: () => focusEl("#authors_notes")}, // CTRL-N is reserved :^(
     {mod: "ctrl", key: "g", desc: "Focuses game text", func: () => focusEl("#input_text")},
-    {mod: "ctrl", key: "l", desc: '"Lock" screen (Not secure)', func: () => socket.emit("privacy_mode", {'enabled': true})},
+    {mod: "ctrl", key: "l", desc: '"Lock" screen (Not secure)', func: maybe_enable_privacy_mode},
     {mod: "ctrl", key: "k", desc: "Finder", func: open_finder},
     {mod: "ctrl", key: "/", desc: "Help screen", func: () => openPopup("shortcuts-popup")},
 ]
@@ -597,13 +598,11 @@ function do_story_text_updates(action) {
             story_area.append(item);
         }
     }
-
-
-    if (action.action['Selected Text'].charAt(0) == ">") {
-        item.classList.add("action_mode_input");
-    } else {
-        item.classList.remove("action_mode_input");
-    }
+
+    item.classList.toggle(
+        "action_mode_input",
+        action.action['Selected Text'].replaceAll("\n", "")[0] === ">"
+    );
 
     if ('wi_highlighted_text' in action.action) {
         for (chunk of action.action['wi_highlighted_text']) {
@@ -3404,16 +3403,36 @@ function update_story_picture(chunk_id) {
     image.setAttribute("chunk", chunk_id);
 }
 
+function maybe_enable_privacy_mode() {
+    const password = document.getElementById("user_privacy_password").value;
+
+    if (!password) {
+        showNotification(
+            "Lock Failed",
+            "Please set a password before locking KoboldAI.",
+            "error"
+        )
+        return;
+    }
+
+    socket.emit("privacy_mode", {'enabled': true})
+}
+
 function privacy_mode(enabled) {
+    privacy_mode_enabled = enabled;
+    updateTitle();
+
+    const sideMenu = document.getElementById("SideMenu");
+    const mainGrid = document.getElementById("main-grid");
+    const rightSideMenu = document.getElementById("rightSideMenu");
+
+    for (const menu of [sideMenu, mainGrid, rightSideMenu]) {
+        menu.classList.toggle("superblur", enabled);
+    }
+
     if (enabled) {
-        document.getElementById('SideMenu').classList.add("superblur");
-        document.getElementById('main-grid').classList.add("superblur");
-        document.getElementById('rightSideMenu').classList.add("superblur");
         openPopup("privacy_mode");
     } else {
-        document.getElementById('SideMenu').classList.remove("superblur");
-        document.getElementById('main-grid').classList.remove("superblur");
-        document.getElementById('rightSideMenu').classList.remove("superblur");
+        if (!$el("#privacy_mode").classList.contains("hidden")) closePopups();
        document.getElementById('privacy_password').value = "";
     }
@@ -4710,7 +4729,7 @@ function close_menus() {
     document.getElementById("main-grid").classList.remove("story_menu-open");
 
     //close popup menus
-    closePopups();
+    closePopups(true);
 
     //unselect sampler items
     for (temp of document.getElementsByClassName("sample_order")) {
@@ -5811,8 +5830,15 @@ function position_context_menu(contextMenu, x, y) {
 
 function updateTitle() {
     const titleInput = $el(".var_sync_story_story_name");
-    if (!titleInput.innerText) return;
-    document.title = `${titleInput.innerText} - KoboldAI Client`;
+    let titleText = "Story";
+
+    if (!privacy_mode_enabled && titleInput.innerText) {
+        titleText = titleInput.innerText;
+    } else {
+        titleText = "[🔒]"
+    }
+
+    document.title = `${titleText} - KoboldAI Client`;
 }
 
 function openClubImport() {
@@ -5847,17 +5873,27 @@ function openPopup(id) {
     }
 }
 
-function closePopups() {
+function closePopups(userAction=false) {
+    // userAction specifies if a user tried to close the popup by normal means
+    // (ESC, clicking outside the menu, etc).
     const container = $el("#popup-container");
-    container.classList.add("hidden");
+    let allHidden = true;
 
     for (const popupWindow of container.children) {
+        // Do not let the user close windows they shouldn't be! Sneaky devils!
+        if (userAction && popupWindow.getAttribute("allow-close") === "false") {
+            allHidden = false;
+            continue;
+        }
+
         popupWindow.classList.add("hidden");
     }
+
+    if (allHidden) container.classList.add("hidden");
 }
 
 $el("#popup-container").addEventListener("click", function(event) {
-    if (event.target === this) closePopups();
+    if (event.target === this) closePopups(true);
 });
 
 /* -- Colab Cookie Handling -- */
templates/popups.html: 4 changes (2 additions & 2 deletions)
@@ -70,12 +70,12 @@
     </div>
 </div>
 <!---------------- Private Mode Unlock screen ---------------------->
-<div id="privacy_mode" class="popup-window popup">
+<div id="privacy_mode" class="popup-window popup" allow-close="false">
     <div class="title">
         <div class="popuptitletext">Locked</div>
     </div>
     <div id="popup_list_area" class="popup_list_area">
-        This story is in private mode. Please enter password to unlock<br/>
+        This story is in private mode. Please enter the password to unlock it.<br/>
         <input type="password" id="privacy_password"/>
     </div>
     <div class="popup_load_cancel">
templates/story flyout.html: 16 changes (8 additions & 8 deletions)
@@ -50,6 +50,14 @@ <h4 class="section_header">Author's Note</h4>
 <label for="authors_notes">Author's Notes:</label><br/>
 <textarea autocomplete="off" rows=16 id="authors_notes" class="var_sync_story_authornote var_sync_alt_story_authornote_length fullwidth" oninput="autoResize(this)" onchange='sync_to_server(this);'></textarea><br/>
 
+<div class="setting_tile_area">
+    {% with menu='author_notes' %}
+        {% with sub_path='' %}
+            {% include 'settings item.html' %}
+        {% endwith %}
+    {% endwith %}
+</div>
+
 <h4 class="section_header">Genre</h4>
 <div class="help_text">Styles the AI will attempt to imitate. Effectiveness depends on model.</div>
 <input id="genre-input" class="fullwidth" placeholder="Fantasy" autocomplete="off" spellcheck="false">
@@ -75,14 +83,6 @@ <h4 class="section_header"><label for="An-Attention-Bias">Attention Bias Test</label></h4>
         }
     </script>
 </div>
-
-<div class="setting_tile_area">
-    {% with menu='author_notes' %}
-        {% with sub_path='' %}
-            {% include 'settings item.html' %}
-        {% endwith %}
-    {% endwith %}
-</div>
 </div>
 </div>
 <div id="story_menu_notes" class="story_category_area tab-target tab-target-story hidden">
themes/tweaks/hide-welcome-logo.css: 2 changes (1 addition & 1 deletion)
@@ -1 +1 @@
-#welcome_text { display:none; pointer-events: none }
+#welcome-logo { display:none; pointer-events: none }