Commit

change env vars and ingress, add prompt count retrieval to extension
ili16 committed Feb 16, 2024
1 parent 594aec3 commit 30b44fa
Showing 4 changed files with 64 additions and 30 deletions.
4 changes: 2 additions & 2 deletions Kubernetes/modernizer/modernizer-backend-deployment.yml
@@ -20,11 +20,11 @@ spec:
imagePullPolicy: Always
env:
- name: OLLAMA_URL
value: "http://ollama-service.ba-kovacevic:11434"
value: "https://quagga-crack-bluejay.ngrok-free.app"
- name: WEAVIATE_HOST
value: "weaviate.ba-kovacevic:80"
- name: OLLAMA_MODEL
value: "starcoder:3b"
value: "codellama:13b-instruct"
- name: WEAVIATE_KEY
valueFrom:
secretKeyRef:
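Note on the change above: OLLAMA_URL now points at an Ollama instance exposed through an ngrok tunnel, and OLLAMA_MODEL switches from starcoder:3b to codellama:13b-instruct. A minimal TypeScript sketch of how a service could consume these two variables (illustrative only; the modernizer backend's actual code is not part of this commit):

// Illustrative sketch: reads the two env vars set in the deployment and
// calls Ollama's /api/generate endpoint with streaming disabled.
const OLLAMA_URL = process.env.OLLAMA_URL ?? "https://quagga-crack-bluejay.ngrok-free.app";
const OLLAMA_MODEL = process.env.OLLAMA_MODEL ?? "codellama:13b-instruct";

async function generate(prompt: string): Promise<string> {
    const res = await fetch(`${OLLAMA_URL}/api/generate`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ model: OLLAMA_MODEL, prompt, stream: false })
    });
    if (!res.ok) {
        throw new Error(`Ollama request failed: HTTP ${res.status}`);
    }
    // Ollama returns the completion in the "response" field of the JSON body.
    const body = await res.json() as { response?: string };
    return body.response ?? "";
}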
17 changes: 1 addition & 16 deletions Kubernetes/modernizer/modernizer-ingress.yml
@@ -3,7 +3,6 @@ apiVersion: networking.k8s.io/v1
metadata:
name: modernizer-ingress
annotations:
nginx.ingress.kubernetes.io/rewrite-target: /$2$3
nginx.ingress.kubernetes.io/use-regex: 'true'
spec:
tls:
@@ -14,27 +13,13 @@ spec:
- host: modernizer.milki-psy.dbis.rwth-aachen.de
http:
paths:
- path: /()(ollama)(.*)
- path: /(.*)
pathType: Prefix
backend:
service:
name: modernizer-backend-service
port:
number: 443
- path: /()(weaviate)(.*)
pathType: Prefix
backend:
service:
name: modernizer-backend-service
port:
number: 443
- path: /debug(/|$)(.*)
pathType: Prefix
backend:
service:
name: ollama-service
port:
number: 11434
status:
loadBalancer:
ingress:
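Net effect of the ingress change: the rewrite-target annotation and the separate /ollama, /weaviate and /debug rules are dropped, and a single catch-all /(.*) rule forwards every request, path unchanged, to modernizer-backend-service on port 443. A hedged client-side sketch against the consolidated ingress (the /weaviate/promptcount path is taken from the extension code later in this commit):

// Sketch: with the single /(.*) rule, the full request path reaches the backend unmodified.
const BASE_URL = "https://modernizer.milki-psy.dbis.rwth-aachen.de";

async function getPromptCount(functionName: string): Promise<string> {
    const params = new URLSearchParams({ query: functionName });
    const res = await fetch(`${BASE_URL}/weaviate/promptcount?${params}`);
    // Fall back to "0" on any non-OK status, mirroring the extension's behaviour.
    return res.ok ? res.text() : "0";
}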
64 changes: 56 additions & 8 deletions extension/src/CodelensProvider.ts
@@ -1,4 +1,5 @@
import * as vscode from 'vscode';
import fetch from 'node-fetch';

export class CodelensProvider implements vscode.CodeLensProvider {
private codeLenses: vscode.CodeLens[] = [];
@@ -15,7 +16,7 @@ export class CodelensProvider implements vscode.CodeLensProvider {
});
}

public provideCodeLenses(document: vscode.TextDocument, token: vscode.CancellationToken): vscode.CodeLens[] | Thenable<vscode.CodeLens[]> {
public async provideCodeLenses(document: vscode.TextDocument, token: vscode.CancellationToken): Promise<vscode.CodeLens[]> {
if (vscode.workspace.getConfiguration("modernizer-vscode").get("enableCodeLens", true)) {
this.codeLenses = [];
const regex = new RegExp(this.regex);
@@ -36,15 +37,22 @@ export class CodelensProvider implements vscode.CodeLensProvider {
arguments: [range, functionName]
};

const codeLens2 = new vscode.CodeLens(range);
codeLens2.command = {
title: `Retrieve similar prompts for '${matches[0]}'`,
tooltip: `A randomized and pre-built prompt will be sent to an LLM to retrieve similar prompts for '${matches[0]}'`,
command: "modernizer-vscode.codelensAction",
arguments: [range, functionName]
const promptCount = await this.fetchPromptCount(functionName); // Await the fetch operation

const codeLens2 = new vscode.CodeLens(range, {
title: `Prompt Count: ${promptCount}`, // Update title with fetched prompt count
tooltip: `Fetching prompt count for function: ${functionName}`,
command: ''
});

const codeLens3 = new vscode.CodeLens(range);
codeLens3.command = {
title: "Retrieve highest ranked response",
command: "codelens.showInformation",
arguments: ["Hello from CodeLens", "Action 1", "Action 2"]
};

this.codeLenses.push(codeLens, codeLens2);
this.codeLenses.push(codeLens, codeLens2, codeLens3);
}
}
return this.codeLenses;
@@ -58,4 +66,44 @@ export class CodelensProvider implements vscode.CodeLensProvider {
}
return null;
}

private async fetchPromptCount(functionName: string): Promise<number | string> {
const promptCountURL: string = 'https://modernizer.milki-psy.dbis.rwth-aachen.de/weaviate/promptcount';
const queryParams = new URLSearchParams({ query: functionName });
const url = `${promptCountURL}?${queryParams.toString()}`;

const response = await fetch(url);
if (!response.ok) {
return "0";
}

const data = await response.json();
return data;
}
}

// Register command to show information box
let disposable = vscode.commands.registerCommand('codelens.showInformation', (message: string, action1: string, action2: string) => {
const options: vscode.MessageItem[] = [
{ title: `👍 Upvote` },
{ title: `👎 Downvote` }
];
vscode.window.showInformationMessage(message, ...options).then(selection => {
if (selection) {
if (selection.title.startsWith('👍')) {
vscode.window.showInformationMessage("Upvote selected");
} else if (selection.title.startsWith('👎')) {
vscode.window.showInformationMessage("Downvote selected");
}
}
});
});

export function activate(context: vscode.ExtensionContext) {
// Register the CodeLens provider
context.subscriptions.push(vscode.languages.registerCodeLensProvider('*', new CodelensProvider()));
// Add disposables to context subscriptions
context.subscriptions.push(disposable);
}

export function deactivate() {}
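The committed fetchPromptCount returns whatever JSON body the endpoint yields, typed as number | string. A slightly more defensive variant, shown here only as a sketch and not part of the commit, validates the payload before it is rendered in the Prompt Count lens:

// Sketch only: a stricter version of fetchPromptCount that never throws
// and always yields something printable for the "Prompt Count" title.
private async fetchPromptCountSafe(functionName: string): Promise<string> {
    const promptCountURL = 'https://modernizer.milki-psy.dbis.rwth-aachen.de/weaviate/promptcount';
    const url = `${promptCountURL}?${new URLSearchParams({ query: functionName })}`;
    try {
        const response = await fetch(url);
        if (!response.ok) {
            return "0";
        }
        const data: unknown = await response.json();
        // Accept a bare number or a numeric string; anything else falls back to "0".
        if (typeof data === 'number' || typeof data === 'string') {
            return String(data);
        }
        return "0";
    } catch {
        return "0"; // Network errors should not break CodeLens rendering.
    }
}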
9 changes: 5 additions & 4 deletions extension/src/extension.ts
@@ -42,14 +42,14 @@ export function activate(context: ExtensionContext) {
const functionCode = activeEditor.document.getText(selectedFunctionRange);

// Send the function code as a prompt to the Ollama API
const response = await fetch('https://modernizer.milki-psy.dbis.rwth-aachen.de/ollama/api/generate', {
const response = await fetch('https://modernizer.milki-psy.dbis.rwth-aachen.de/generate', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
model: 'codellama:7b-instruct',
prompt: `What does this function do: '${functionCode}'`,
model: 'codellama:13b-instruct',
prompt: `${functionCode}`,
stream: false
})
});
@@ -60,7 +60,7 @@
if (contentType && contentType.includes('application/json')) {
// Parse and display the Ollama response
const responseBody = await response.json();
const responseText = responseBody.response || 'No response field found';
const responseText = responseBody || 'No response field found';

const outputWindow = vscode.window.createOutputChannel('Ollama Response');
outputWindow.show(true);
@@ -84,6 +84,7 @@

}


// this method is called when your extension is deactivated
export function deactivate() {
if (disposables) {
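With this change the extension posts the raw function text to the backend's /generate route using codellama:13b-instruct and prints the returned JSON body directly. A hedged, self-contained sketch of the same call for manual testing (the exact response shape is assumed, since the backend contract is not shown in this diff):

// Sketch: the same request the extension now sends from its CodeLens command.
async function explainFunction(functionCode: string): Promise<string> {
    const res = await fetch('https://modernizer.milki-psy.dbis.rwth-aachen.de/generate', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            model: 'codellama:13b-instruct', // matches the new OLLAMA_MODEL setting
            prompt: functionCode,
            stream: false
        })
    });
    if (!res.ok) {
        throw new Error(`generate failed with HTTP ${res.status}`);
    }
    // The extension treats the whole JSON body as the response text.
    const body = await res.json();
    return typeof body === 'string' ? body : JSON.stringify(body);
}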
