Ollama LLM Fill in the Middle Code Completion

This forum allows you to share scripts with other Zeus users. Please do not post bug reports, feature requests or questions to this forum, but rather use it exclusively for posting scripts or for the discussion of scripts that have been posted.
Post Reply
jussij
Site Admin
Posts: 2650
Joined: Fri Aug 13, 2004 5:10 pm

Ollama LLM Fill in the Middle Code Completion

Post by jussij »

NOTE: The latest version of Zeus includes scripts that allow it to integrate with Gemini AI, OpenAI and Ollama.

These features can be found on the new Macros, AI menu shown below:
ai-macros.png
ai-macros.png (94.16 KiB) Viewed 17 times
IMPORTANT: Before trying to use this macro, make sure you are running the most recent version of Zeus: https://www.zeusedit.com/download.html

Consider a Python file that contains the following text:
fim-before.png
fim-before.png (7.53 KiB) Viewed 17 times
With the cursor on the line shown in the image, running the auto complete macro will result in this output response:
fim-after.png
fim-after.png (12.32 KiB) Viewed 17 times
NOTE: Instructions on how to run these macros can be found in the header section of each macro script found in the Zeus zScript folder.

Legacy Macro
NOTE: The Ollama FIM macro shown below is now legacy as it has been superseded by the macros that come with the installer.

Here is the Ollama LLM Fill in the Middle Code Completion script that produces this code completion behaviour:

Code: Select all

#
#        Name: Ollama Fill in the Middle Code Completion
#
#  Description: This macro will provide LLM Fill in the Middle code completion embedding the
#               result inside the document.
#
#  Setup:
#
#     1. Download and install ollama found here: https://ollama.com/download/windows
#
#     2. From inside Zeus open a DOS command line prompt using the Tools menu, and then
#        run the following commands.
#
#     3. Install the ollama Python package by running this command:
#
#            pip install ollama
#
#     4. Pick a model to be installed: https://ollama.com/library
#
#     5. Install the model:
#
#            ollama run codellama:7b-code
#
#     6. Make sure ollama server is running:
#
#            ollama serve
#
#  Usage:
#     To use, load the macro (i.e. F9 key), position the cursor and then run macro using
#     the Macro Execute (i.e. F8 key)
#
#     Alternatively use the Options, Editor Options menu and in the Keyboard section bind
#     the macro to the keyboard.
#
#  Example: Open a Python file inside Zeus and enter the following code into that document:
#
#               def remove_non_ascii(s: str) -> str:
#                   |
#                   return result
#
#         Note: The '|' symbol in that code above represents the location of the cursor
#
#         Running the macro should then result in the following output being entered into
#         the file:
#
#               def remove_non_ascii(s: str) -> str:
#                   """Remove non-ASCII characters from a string."""
#                   if s is None:
#                       return ''
#                   result = ''.join([i if ord(i) < 128 else '' for i in s])
#                   return result
#
import os
import re
import time
import zeus
import ollama
import asyncio

def atoi(str):
    """Convert a decimal string to an integer.

    Accepts an optional leading '+' or '-' sign and surrounding
    whitespace; an empty string yields 0 (preserving the original
    behaviour relied on by key_macro's $TabSize handling).

    Raises ValueError on any non-digit character instead of silently
    producing garbage, which the original did (e.g. "-5" became -25).

    Note: the parameter keeps its original name (shadowing the builtin
    'str') to preserve the call signature.
    """
    text = str.strip()
    if not text:
        return 0

    # peel off an optional sign
    sign = 1
    if text[0] in '+-':
        if text[0] == '-':
            sign = -1
        text = text[1:]

    resultant = 0
    for ch in text:
        if not ('0' <= ch <= '9'):
            raise ValueError("invalid decimal digit %r in %r" % (ch, str))
        resultant = resultant * 10 + (ord(ch) - ord('0'))

    return sign * resultant

def ollama_generate(prompt, suffix):
    """Ask the local ollama server for a fill-in-the-middle completion.

    prompt -- the code before the cursor (the FIM prefix)
    suffix -- the code after the cursor (the FIM suffix)

    Returns a (completed, text) tuple: completed is True when a full
    response was streamed back, False when the user pressed ESC or the
    server could not be reached; text is the generated code (empty
    string on failure).
    """
    # set the model to be used: https://ollama.com/library
    model = 'codellama:7b-code'

    # helps debugging
    #zeus.message_box(1, prompt, suffix)

    zeus.message('Starting ollama generate....')

    # accumulate streamed chunks in a list and join once at the end
    # (avoids quadratic string concatenation)
    chunks = []
    status = "Thinking (ESC to cancel)."
    message = status
    VK_ESCAPE = 27   # 0x1B scan code

    try:
        response = ollama.generate(model=model,
                                   prompt=prompt,
                                   suffix=suffix,
                                   options={
                                    'num_predict': 128,
                                    'temperature': 0,
                                    'top_p': 0.9,
                                    'stop': ['<EOT>'],
                                   },
                                   stream=True
                                  )

        # some sleep needed and clear the wait cursor
        zeus.yieldTask(100, 1)

        for chunk in response:
            chunks.append(chunk['response'])

            # some sleep needed and clear the wait cursor
            zeus.yieldTask(100, 1)

            # grow the progress dots, restarting once the line gets long
            message = message + '.'
            if len(message) > 150:
                message = status

            # allow the user to abort a long-running generation
            if zeus.key_down(VK_ESCAPE) == 1:
                zeus.message("Operation cancelled by user.")
                return False, ''

            zeus.message(message)

        zeus.message('')

    except Exception:
        # bug fix: the original fell through to 'return True, result'
        # here with 'result' unbound, raising UnboundLocalError and
        # reporting success on a connection failure
        zeus.message("Failed to connect to ollama. Use 'ollama serve' to make sure the service is running.")
        zeus.beep()
        return False, ''

    return True, ''.join(chunks)

def key_macro():
    """Build a fill-in-the-middle prompt from the text around the cursor,
    send it to ollama and insert the completion into the document.

    The prefix is the text from the enclosing function's start down to
    the cursor line; the suffix is the text from the cursor to the next
    function (or end of file). Both are passed to ollama_generate and
    the streamed result is appended at the cursor line.
    """
    # macro only works for documents
    document = zeus.is_document()

    # macro only works for read/write documents.
    locked = zeus.is_read_only()

    if (locked == 1) or (document == 0):
      zeus.message("This macro only works with named, writable documents.")
      zeus.beep()
      return 0

    # remember the current window so focus can be restored after the
    # (potentially slow) LLM round trip
    window_id = zeus.get_window_id()

    line_current = zeus.get_line_pos()

    # suppress beeps and redraws while the cursor is moved around below
    zeus.beep_disable()
    zeus.screen_update_disable()

    # find the start of the enclosing function without disturbing the
    # caret (NOTE(review): assumes FunctionFindPrevious lands on the
    # previous function header — per the Zeus macro API)
    zeus.cursor_save()
    zeus.FunctionFindPrevious()
    line_start = zeus.get_line_pos()
    zeus.cursor_restore()

    if line_start == line_current:
        # cursor is on the function definition line itself
        prefix = zeus.get_line_text(line_start)
    else:
        # prefix = function start up to (but not including) the cursor line
        prefix = zeus.get_line_text(line_start, line_current - line_start)
        text_current = zeus.get_line_text(line_current)

        if len(text_current) == 0:
            # cursor line is empty: synthesize one tab stop of spaces so
            # the model completes at the expected indentation level
            tab_size = zeus.macro_tag('$TabSize')
            text_current = b' ' * atoi(tab_size.decode('utf8'))

        prefix = prefix + b'\n' + text_current

    # find the start of the next function, again preserving the caret
    zeus.cursor_save()
    zeus.FunctionFindNext()
    line_end = zeus.get_line_pos()
    zeus.cursor_restore()

    zeus.screen_update_enable()
    zeus.screen_update()
    zeus.beep_enable()

    # suffix = everything after the cursor line, up to the next function
    # or, when no next function was found, to the end of the document
    suffix = b'\n'
    if (line_end - line_current) > 0:
        suffix = suffix + zeus.get_line_text(line_current + 1, line_end - line_current + 1)
    else:
        suffix = suffix + zeus.get_line_text(line_current + 1, zeus.get_line_count() - line_current)

    suffix = suffix + b'\n'

    #zeus.message_box("line_current: " + str(line_current) + " line_start: " + str(line_start) + "\nprefix: '" + prefix.decode('utf8') + "'")
    #zeus.message_box("line_end: " + str(line_end) + "\nsuffix: '" + suffix.decode('utf8') + "'")

    if len(prefix) == 0:
        zeus.message('No prefix found. Place the cursor inside a function that needs to be completed.')
        zeus.beep()
    else:
        completed, response = ollama_generate(prefix.decode('utf8'), suffix.decode('utf8'))

        if completed == True:
            # make sure we return to the original window
            zeus.window_activate(window_id)

            # insert the response
            zeus.line_append(response, 1)

key_macro() # run the macro
Post Reply