@@ -9,16 +9,49 @@ import { ollamaCheckModel } from '../modules/ollamaCheckModel';
 import { ollamaDownloadModel } from '../modules/ollamaDownloadModel';
 import { config } from '../config';
 
+type Status = {
+    icon: string;
+    text: string;
+};
+
 export class PromptProvider implements vscode.InlineCompletionItemProvider {
 
     lock = new AsyncLock();
     statusbar: vscode.StatusBarItem;
     context: vscode.ExtensionContext;
+    private _paused: boolean = false;
+    private _status: Status = { icon: "chip", text: "Llama Coder" };
 
     constructor(statusbar: vscode.StatusBarItem, context: vscode.ExtensionContext) {
         this.statusbar = statusbar;
         this.context = context;
     }
+
+    public set paused(value: boolean) {
+        this._paused = value;
+        this.update();
+    }
+
+    public get paused(): boolean {
+        return this._paused;
+    }
+
+    private update(icon?: string, text?: string): void {
+        this._status.icon = icon ? icon : this._status.icon;
+        this._status.text = text ? text : this._status.text;
+
+        let statusText = '';
+        let statusTooltip = '';
+        if (this._paused) {
+            statusText = `$(sync-ignored) ${this._status.text}`;
+            statusTooltip = `${this._status.text} (Paused)`;
+        } else {
+            statusText = `$(${this._status.icon}) ${this._status.text}`;
+            statusTooltip = `${this._status.text}`;
+        }
+        this.statusbar.text = statusText;
+        this.statusbar.tooltip = statusTooltip;
+    }
 
     async delayCompletion(delay: number, token: vscode.CancellationToken): Promise<boolean> {
         if (config.inference.delay < 0) {
@@ -37,6 +70,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
         }
 
         try {
+            if (this.paused) {
+                return;
+            }
 
             // Ignore unsupported documents
             if (!isSupported(document)) {
@@ -82,7 +118,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                 let inferenceConfig = config.inference;
 
                 // Update status
-                this.statusbar.text = `$(sync~spin) Llama Coder`;
+                this.update('sync~spin', 'Llama Coder');
                 try {
 
                     // Check model exists
@@ -110,9 +146,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                         }
 
                         // Perform download
-                        this.statusbar.text = `$(sync~spin) Downloading`;
+                        this.update('sync~spin', 'Downloading');
                         await ollamaDownloadModel(inferenceConfig.endpoint, inferenceConfig.modelName);
-                        this.statusbar.text = `$(sync~spin) Llama Coder`;
+                        this.update('sync~spin', 'Llama Coder');
                     }
                     if (token.isCancellationRequested) {
                         info(`Canceled after AI completion.`);
@@ -141,7 +177,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                         value: res
                     });
                 } finally {
-                    this.statusbar.text = `$(chip) Llama Coder`;
+                    this.update('chip', 'Llama Coder');
                 }
             } else {
                 if (cached !== null) {
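For reference, a minimal sketch of how the new `paused` property could be driven by a toggle command during extension activation. This is not part of the diff: the command ID `llamaCoder.togglePause`, the import path, and the `activate` wiring are illustrative assumptions.

    // Hypothetical activation wiring; the command ID and import path are assumptions.
    import * as vscode from 'vscode';
    import { PromptProvider } from './prompts/provider';

    export function activate(context: vscode.ExtensionContext) {
        // Status bar item shared with the provider; update() renders into it
        const statusbar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Right);
        statusbar.show();

        const provider = new PromptProvider(statusbar, context);
        context.subscriptions.push(
            vscode.languages.registerInlineCompletionItemProvider({ pattern: '**' }, provider),
            // Flipping the setter re-renders the status bar via update()
            vscode.commands.registerCommand('llamaCoder.togglePause', () => {
                provider.paused = !provider.paused;
            })
        );
    }

Because provideInlineCompletionItems returns early while paused is true, running the command is enough to stop and resume completions without tearing down the provider.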