clearance

teraflops 2025-06-02 23:33:53 +02:00
parent 4da632bbf7
commit 53675c9b03
Signed by: teraflops
GPG Key ID: 2B77D97AF6F8968C
5 changed files with 0 additions and 422 deletions

View File

@@ -1,93 +0,0 @@
vim.loader.enable()
require("set")
require("remap")
require("autocmd")
require("lazy_init")
-- Load the filetype configuration
pcall(require, 'filetype') -- ignore errors if filetype.lua does not exist

function ReplaceWordUnderCursor()
  local word = vim.fn.expand("<cword>")
  local replace = vim.fn.input("Replace \"" .. word .. "\" with: ")
  if replace ~= "" then
    -- Escape the word so "/" and "\" cannot break the substitute command
    vim.cmd("%s/\\V" .. vim.fn.escape(word, "/\\") .. "/" .. replace .. "/g")
  end
end
vim.api.nvim_set_keymap("n", "<Leader>r", ":lua ReplaceWordUnderCursor()<CR>", { noremap = true, silent = true })
function ReplacePhraseUnderCursor()
  -- Prompt for the phrase to replace
  local phrase = vim.fn.input("Replace phrase: ")
  if phrase == "" then
    print("No phrase provided.")
    return
  end
  local replace = vim.fn.input("Replace \"" .. phrase .. "\" with: ")
  if replace ~= "" then
    vim.cmd("%s/\\V" .. vim.fn.escape(phrase, "/\\") .. "/" .. replace .. "/g")
  end
end
vim.api.nvim_set_keymap("n", "<Leader>f", ":lua ReplacePhraseUnderCursor()<CR>", { noremap = true, silent = true })
vim.filetype.add({
  extension = {
    yml = "ansible",
    yaml = "ansible",
  },
  -- You can add other associations here if needed
})
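
-- Note: the autocmd below sets the same yml/yaml -> ansible filetype as the
-- vim.filetype.add() call above, so the two overlap.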
vim.cmd([[
autocmd BufRead,BufNewFile *.yml,*.yaml set filetype=ansible
]])
require("catppuccin").setup({
flavour = "macchiato", -- Puedes elegir entre 'latte', 'frappe', 'macchiato', 'mocha'
background = {
light = "latte",
dark = "macchiato",
},
-- Otras configuraciones de Catppuccin...
custom_highlights = function(colors)
return {
-- Personalizar DiagnosticWarn con un tono de rojo menos intrusivo
DiagnosticWarn = {
fg = colors.red6, -- Tono de rojo menos brillante
bg = "NONE",
bold = false,
italic = false,
},
DiagnosticVirtualTextWarn = {
fg = colors.red6,
bg = "NONE",
italic = false,
},
-- Opcional: Personalizar otros grupos de diagnóstico si lo deseas
-- DiagnosticError = {
-- fg = colors.red,
-- bg = "NONE",
-- bold = true,
-- italic = false,
-- },
-- DiagnosticInfo = {
-- fg = colors.blue,
-- bg = "NONE",
-- bold = false,
-- italic = false,
-- },
-- DiagnosticHint = {
-- fg = colors.cyan,
-- bg = "NONE",
-- bold = false,
-- italic = false,
-- },
}
end,
})
-- Aplicar el esquema de colores
vim.cmd.colorscheme("catppuccin")

View File

@@ -1,15 +0,0 @@
-- ~/.config/nvim/lua/plugins/chatgpt.lua
return {
  "jackMort/ChatGPT.nvim",
  dependencies = {
    "MunifTanjim/nui.nvim",
    "nvim-lua/plenary.nvim",
    "nvim-telescope/telescope.nvim",
  },
  config = function()
    require("chatgpt").setup({
      -- Custom configuration (optional)
    })
  end,
}
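
-- Example option (an assumption, not taken from this commit): ChatGPT.nvim can read
-- the OpenAI API key from an external command instead of an environment variable, e.g.
--   require("chatgpt").setup({ api_key_cmd = "pass show openai/api_key" })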

View File

@@ -1,97 +0,0 @@
---@author tomkoid
---@license MIT
-- Define the main plugin list
local plugins = {
  "tpope/vim-sleuth", -- Detect tabstop and shiftwidth automatically

  -- "gc" to comment visual regions/lines
  { "numToStr/Comment.nvim", opts = {} },

  { -- Adds git related signs to the gutter, as well as utilities for managing changes
    "lewis6991/gitsigns.nvim",
    opts = {},
  },

  {
    -- If you want to see what colorschemes are already installed, you can use `:Telescope colorscheme`.
    "catppuccin/nvim",
    priority = 1000, -- Make sure to load this before all the other start plugins.
    init = function()
      -- Load the colorscheme here.
      -- Like many other themes, this one has different styles, and you could load
      -- any other, such as 'catppuccin-latte', 'catppuccin-frappe', or 'catppuccin-macchiato'.
      vim.cmd.colorscheme("catppuccin-mocha")

      -- You can configure highlights by doing something like:
      vim.cmd.hi("Comment gui=none")
    end,
  },

  -- Highlight todo, notes, etc in comments
  {
    "folke/todo-comments.nvim",
    event = "VimEnter",
    dependencies = { "nvim-lua/plenary.nvim" },
    opts = { signs = false },
  },

  {
    "nvim-lualine/lualine.nvim",
    opts = {
      options = {
        disabled_filetypes = { "NERDTree", "NvimTree_1" },
      },
    },
  },

  { "vimwiki/vimwiki" },

  {
    "NvChad/nvim-colorizer.lua",
    config = function()
      require("colorizer").setup()
    end,
  }, -- colorize hex colors

  {
    "f-person/git-blame.nvim",
    config = function()
      require("gitblame").setup({ enabled = true })
    end,
  },

  {
    "lambdalisue/suda.vim",
  },

  { "windwp/nvim-ts-autotag" },

  -- Lazy.nvim
  {
    "hiasr/vim-zellij-navigator.nvim",
    config = function()
      require("vim-zellij-navigator").setup()
    end,
  },

  {
    "danymat/neogen",
    config = function()
      local neogen = require("neogen")
      neogen.setup({
        snippet_engine = "luasnip",
      })

      local opts = { noremap = true, silent = true }
      vim.keymap.set("n", "<leader>nc", function()
        neogen.generate({ snippet_engine = "luasnip" })
      end, opts)
    end,
  },
}
-- Load the LSP plugin specs from 'lua/plugins/lsp.lua'
local lsp_plugins = require('plugins.lsp')

-- Merge both plugin lists
local combined_plugins = vim.list_extend(plugins, lsp_plugins)

-- Return the combined list for lazy.nvim
return combined_plugins
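
-- Assumed shape of lua/plugins/lsp.lua: it should return a plain list of lazy.nvim
-- plugin specs, e.g.
--   return { { "neovim/nvim-lspconfig" }, ... }
-- so that vim.list_extend() above can append it to `plugins`.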

View File

@@ -1,143 +0,0 @@
import weechat
import requests
import json
from requests.auth import HTTPBasicAuth
"""
Ollama Bot for WeeChat (Remote Version with Authentication)

This script automatically responds to mentions in channels and private messages
using an Ollama LLM hosted remotely.

Features:
  - Responds to mentions in channels.
  - Can respond to private messages if enabled.
  - Allows manual queries using the /ollama command.
  - Configurable via WeeChat /set commands.

Usage:
  - To ask a question manually:
      /ollama What is Python?
  - To enable or disable automatic responses in channels:
      /set plugins.var.python.ollama.highlight_response on   # Enable responses in channels
      /set plugins.var.python.ollama.highlight_response off  # Disable responses in channels
  - To enable or disable automatic responses in private messages:
      /set plugins.var.python.ollama.pm_response on   # Enable PM responses
      /set plugins.var.python.ollama.pm_response off  # Disable PM responses

Dependencies:
  - Requires an Ollama server running at https://ollama.priet.us/api/generate with authentication.
"""
# Script metadata
SCRIPT_NAME = "ollama"
SCRIPT_AUTHOR = "teraflops"
SCRIPT_VERSION = "2.1"
SCRIPT_LICENSE = "MIT"
SCRIPT_DESC = "Automatically responds to mentions using Ollama and allows manual queries, including PMs"
OLLAMA_API_URL = "https://ollama.priet.us/api/generate"
OLLAMA_USER = "nginx-user"
OLLAMA_PASS = "wasamasa123" # Replace with the actual password
# Register the script
weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION, SCRIPT_LICENSE, SCRIPT_DESC, "", "")
# Script configuration in Weechat
def setup_config():
    if not weechat.config_is_set_plugin("highlight_response"):
        weechat.config_set_plugin("highlight_response", "on")  # Enable auto-responses by default
    if not weechat.config_is_set_plugin("pm_response"):
        weechat.config_set_plugin("pm_response", "off")  # Disable PM responses by default

setup_config()

def ask_ollama(message):
    """Send a query to Ollama and return the complete response."""
    try:
        data = {"model": "gemma:2b", "prompt": message, "stream": False}
        headers = {"Content-Type": "application/json", "User-Agent": "WeeChat-OllamaBot/1.0"}
        weechat.prnt("", f"[DEBUG] Sending request to Ollama: {OLLAMA_API_URL} with prompt: {message}")
        response = requests.post(
            OLLAMA_API_URL,
            json=data,
            headers=headers,
            auth=HTTPBasicAuth(OLLAMA_USER, OLLAMA_PASS),
            verify=False  # Change to True if you have a valid certificate
        )
        if response.status_code == 401:
            return "Authentication Error: Check username/password."
        elif response.status_code == 403:
            return "Permission Denied: Verify API access."
        elif response.status_code != 200:
            return f"HTTP Error {response.status_code}: {response.text}"
        response_json = response.json()
        return response_json.get("response", "No response received from Ollama.")
    except requests.exceptions.RequestException as e:
        return f"Error connecting to Ollama: {str(e)}"

def command_ollama(data, buffer, args):
    """Command /ollama to manually ask Ollama a question."""
    weechat.prnt("", f"[DEBUG] /ollama command received with args: {args}")
    if not args:
        weechat.prnt(buffer, "[Ollama] Usage: /ollama <question>")
        return weechat.WEECHAT_RC_OK
    response = ask_ollama(args)
    weechat.prnt(buffer, f"[Ollama] {response}")
    return weechat.WEECHAT_RC_OK

def message_callback(data, buffer, date, tags, displayed, highlight, prefix, message):
    """Detect mentions in channels or private messages and respond automatically with Ollama."""
    if weechat.config_get_plugin("highlight_response") == "off":
        return weechat.WEECHAT_RC_OK
    buffer_type = weechat.buffer_get_string(buffer, "localvar_type")
    is_private = buffer_type == "private"
    username = weechat.info_get("irc_nick", "")  # Get the current IRC username
    is_mentioned = f"@{username.lower()}" in message.lower()  # Ensure @username is explicitly mentioned
    # Ignore private messages if pm_response is off
    if is_private and weechat.config_get_plugin("pm_response") == "off":
        return weechat.WEECHAT_RC_OK
    # Only respond in private messages if it's a direct question
    if is_private and not message.strip().endswith("?"):
        return weechat.WEECHAT_RC_OK
    # Only respond in channels if explicitly mentioned or highlighted
    if not is_private and not is_mentioned and not int(highlight):
        return weechat.WEECHAT_RC_OK
    response = ask_ollama(message)
    if is_private:
        weechat.command(buffer, f"/msg {prefix} {response}")  # Reply to private message
    else:
        weechat.command(buffer, f"/say {response}")  # Reply in the channel
    return weechat.WEECHAT_RC_OK

def config_callback(data, option, value):
    """Callback for Weechat configuration changes."""
    weechat.prnt("", f"[Ollama] Configuration changed: {option} = {value}")
    return weechat.WEECHAT_RC_OK
# Register configuration with /set
weechat.config_set_desc_plugin("highlight_response", "Automatically respond to mentions in channels (on/off)")
weechat.config_set_desc_plugin("pm_response", "Automatically respond to private messages (on/off)")
weechat.hook_config("plugins.var.python.ollama.highlight_response", "config_callback", "")
weechat.hook_config("plugins.var.python.ollama.pm_response", "config_callback", "")
# Register commands and hooks
weechat.hook_command("ollama", "Ask something to Ollama", "<question>", "Example: /ollama What is Python?", "", "command_ollama", "")
weechat.hook_print("", "notify_highlight", "", 1, "message_callback", "")
weechat.hook_print("", "notify_message", "", 1, "message_callback", "")
weechat.hook_print("", "notify_private", "", 1, "message_callback", "")

View File

@@ -1,74 +0,0 @@
#!/usr/bin/env python3
import subprocess
import json
import gi
gi.require_version("Gtk", "3.0")
gi.require_version("Gdk", "3.0")
from gi.repository import Gtk, Gdk
class ScratchpadPanel(Gtk.Window):
    def __init__(self):
        super().__init__(title="Scratchpad Windows")
        self.set_border_width(10)
        self.set_default_size(300, 200)
        self.set_keep_above(True)
        self.set_type_hint(Gdk.WindowTypeHint.DIALOG)
        self.set_position(Gtk.WindowPosition.CENTER)
        self.set_resizable(False)

        self.box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)
        self.add(self.box)
        self.populate()

    def populate(self):
        clients = self.get_scratchpad_clients()
        if not clients:
            label = Gtk.Label(label="No windows in the scratchpad.")
            self.box.pack_start(label, True, True, 0)
        else:
            for client in clients:
                title = client.get("title", "Untitled")
                app = client.get("class", "App")
                address = client.get("address")
                btn = Gtk.Button(label=f"{app} - {title}")
                btn.connect("clicked", self.on_button_clicked, address)
                self.box.pack_start(btn, True, True, 0)

        # Add a "Quit" button at the end
        quit_btn = Gtk.Button(label="❌ Quit")
        quit_btn.connect("clicked", self.on_quit_clicked)
        self.box.pack_start(quit_btn, False, False, 10)

        self.show_all()

    def on_button_clicked(self, button, address):
        subprocess.run(["hyprctl", "dispatch", "togglespecialworkspace", "scratchpad"])
        subprocess.run(["hyprctl", "dispatch", "focuswindow", address])
        self.destroy()

    def on_quit_clicked(self, button):
        self.destroy()

    def get_scratchpad_clients(self):
        try:
            result = subprocess.run(["hyprctl", "clients", "-j"], capture_output=True, text=True)
            clients = json.loads(result.stdout)
            scratchpad = [
                c for c in clients
                if c.get("workspace", {}).get("name") == "special:scratchpad"
            ]
            return scratchpad
        except Exception as e:
            print(f"Error getting clients: {e}")
            return []
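
    # Illustrative entry from `hyprctl clients -j` showing only the fields used above
    # (real output contains many more keys; values are placeholders):
    #   {"address": "0x55d2...", "class": "kitty", "title": "scratch terminal",
    #    "workspace": {"name": "special:scratchpad"}}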

def main():
    win = ScratchpadPanel()
    win.connect("destroy", Gtk.main_quit)
    Gtk.main()


if __name__ == "__main__":
    main()