Skip to content

Commit

Permalink
Merge pull request #12361 from GavinGu07/user/gavingu/remove-proposal-api
Browse files Browse the repository at this point in the history

refactor: remove proposal api
  • Loading branch information
1openwindow authored Sep 11, 2024
2 parents bd4384b + 87d594f commit ea46155
Show file tree
Hide file tree
Showing 11 changed files with 25 additions and 53 deletions.
3 changes: 0 additions & 3 deletions packages/vscode-extension/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,6 @@
"workspaceContains:/**/manifest.json",
"workspaceContains:/manifest*.xml"
],
"enabledApiProposals": [
"languageModelSystem"
],
"capabilities": {
"untrustedWorkspaces": {
"supported": "limited",
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ ${spec.appendix.codeExplanation
// Perform the desired operation
const messages: LanguageModelChatMessage[] = [
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, userPrompt),
new LanguageModelChatMessage(LanguageModelChatMessageRole.System, defaultSystemPrompt),
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, defaultSystemPrompt),
];
let copilotResponse = await getCopilotResponseAsString(
"copilot-gpt-3.5-turbo", // "copilot-gpt-4", // "copilot-gpt-3.5-turbo",
Expand Down Expand Up @@ -317,7 +317,7 @@ ${spec.appendix.codeExplanation
if (sampleCode.length > 0) {
messages.push(
new LanguageModelChatMessage(
LanguageModelChatMessageRole.System,
LanguageModelChatMessageRole.User,
getCodeSamplePrompt(sampleCode)
)
);
Expand Down Expand Up @@ -429,7 +429,7 @@ ${spec.appendix.codeExplanation
referenceUserPrompt = customFunctionSystemPrompt;
}
messages.push(
new LanguageModelChatMessage(LanguageModelChatMessageRole.System, referenceUserPrompt)
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, referenceUserPrompt)
);
break;
default:
Expand Down Expand Up @@ -462,9 +462,7 @@ ${spec.appendix.codeExplanation
Let's think step by step.
`;
messages.push(
new LanguageModelChatMessage(LanguageModelChatMessageRole.System, samplePrompt)
);
messages.push(new LanguageModelChatMessage(LanguageModelChatMessageRole.User, samplePrompt));
}
// Because of the token window limitation, we have to cut off the messages if it exceeds the limitation
msgCount = countMessagesTokens(messages);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ export class CodeIssueCorrector implements ISkill {
const sampleMessage: LanguageModelChatMessage | null =
spec.appendix.codeSample.length > 0
? new LanguageModelChatMessage(
LanguageModelChatMessageRole.System,
LanguageModelChatMessageRole.User,
getCodeSamplePrompt(spec.appendix.codeSample)
)
: null;
Expand Down Expand Up @@ -331,7 +331,7 @@ export class CodeIssueCorrector implements ISkill {
// The order in array is matter, don't change it unless you know what you are doing
const messages: LanguageModelChatMessage[] = [
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, tempUserInput),
new LanguageModelChatMessage(LanguageModelChatMessageRole.System, defaultSystemPrompt),
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, defaultSystemPrompt),
];

if (!!sampleMessage) {
Expand All @@ -343,7 +343,7 @@ export class CodeIssueCorrector implements ISkill {
// }

messages.push(
new LanguageModelChatMessage(LanguageModelChatMessageRole.System, referenceUserPrompt)
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, referenceUserPrompt)
);

let msgCount = countMessagesTokens(messages);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ export function buildDynamicPrompt<T>(format: IDynamicPromptFormat<T>, args: T):
function createMessage(role: MessageRole, prompt: string): LanguageModelChatMessage {
switch (role) {
case "system":
return new LanguageModelChatMessage(LanguageModelChatMessageRole.System, prompt);
case "user":
return new LanguageModelChatMessage(LanguageModelChatMessageRole.User, prompt);
case "assistant":
Expand Down
8 changes: 4 additions & 4 deletions packages/vscode-extension/src/officeChat/officePrompts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ export function getOfficeProjectMatchSystemPrompt(

const messages = [
new vscode.LanguageModelChatMessage(
vscode.LanguageModelChatMessageRole.System,
vscode.LanguageModelChatMessageRole.User,
`
You're an assistant designed to find matched Office Add-in projects based on user's input and a list of existing application descriptions. Follow the instructions and think step by step. You'll respond a JSON object containing the addin ID you choose with a float number between 0-1.0 representing confidence. Here's an example of your output format:
{ "id": "", "score": 1.0 }
Expand Down Expand Up @@ -78,7 +78,7 @@ export const defaultOfficeSystemPrompt = () => {
);

return new vscode.LanguageModelChatMessage(
vscode.LanguageModelChatMessageRole.System,
vscode.LanguageModelChatMessageRole.User,
`You are an expert in Office JavaScript add-in development area. Your job is to answer general conceputal question related with Office JavaScript add-in development. Follow the <Instructions> and think step by step.
<Instruction>
Expand All @@ -104,7 +104,7 @@ export const defaultOfficeSystemPrompt = () => {

export const describeOfficeProjectSystemPrompt = () =>
new vscode.LanguageModelChatMessage(
vscode.LanguageModelChatMessageRole.System,
vscode.LanguageModelChatMessageRole.User,
`You are an advisor for Office Add-in developers. You need to describe the project based on the name and description field of user's JSON content. You should control the output between 50 and 80 words.`
);

Expand Down Expand Up @@ -830,6 +830,6 @@ class ${className} extends OfficeExtension.ClientObject {

export const describeOfficeStepSystemPrompt = () =>
new vscode.LanguageModelChatMessage(
vscode.LanguageModelChatMessageRole.System,
vscode.LanguageModelChatMessageRole.User,
`You are an advisor for Office Add-ins developers. You need to reorganize the content. You should control the output between 30 and 50 words. Don't split the content into multiple sentences.`
);
2 changes: 1 addition & 1 deletion packages/vscode-extension/src/officeChat/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@ export async function purifyUserMessage(
The rephrased message should be clear and concise for developer.
`;
const purifyUserMessage = [
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, systemPrompt),
new LanguageModelChatMessage(LanguageModelChatMessageRole.User, userMessagePrompt),
new LanguageModelChatMessage(LanguageModelChatMessageRole.System, systemPrompt),
];
const purifiedResult = await getCopilotResponseAsString(
"copilot-gpt-4",
Expand Down
5 changes: 0 additions & 5 deletions packages/vscode-extension/test/mocks/vsc/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,9 +58,4 @@ export enum LanguageModelChatMessageRole {
* The assistant role.
*/
Assistant = 2,

/**
* The system role.
*/
System = 3,
}
4 changes: 2 additions & 2 deletions packages/vscode-extension/test/mocks/vscode-mock.ts
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,8 @@ mockedVSCode.Task = vscodeMocks.vscMockExtHostedTypes.Task;
(mockedVSCode as any).CancellationError = vscodeMocks.vscMockExtHostedTypes.CancellationError;
(mockedVSCode as any).LSPCancellationError = vscodeMocks.vscMockExtHostedTypes.LSPCancellationError;
mockedVSCode.TaskRevealKind = vscodeMocks.vscMockExtHostedTypes.TaskRevealKind;
mockedVSCode.LanguageModelChatMessage = vscodeMocks.chat.LanguageModelChatMessage;
mockedVSCode.LanguageModelChatMessageRole = vscodeMocks.chat.LanguageModelChatMessageRole;
(mockedVSCode as any).LanguageModelChatMessage = vscodeMocks.chat.LanguageModelChatMessage;
(mockedVSCode as any).LanguageModelChatMessageRole = vscodeMocks.chat.LanguageModelChatMessageRole;
mockedVSCode.TextDocumentSaveReason = vscodeMocks.TextDocumentSaveReason;
(mockedVSCode as any).version = "test";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,12 +145,12 @@ describe("CodeIssueCorrector", () => {
`.repeat(20);
const corrector = new CodeIssueCorrector();
const fakeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: "some sample message",
name: undefined,
};
const fakeSampleCodeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: sampleCodeLong,
name: undefined,
};
Expand Down Expand Up @@ -193,12 +193,12 @@ describe("CodeIssueCorrector", () => {
`.repeat(20);
const corrector = new CodeIssueCorrector();
const fakeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: "some sample message",
name: undefined,
};
const fakeSampleCodeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: sampleCodeLong,
name: undefined,
};
Expand Down Expand Up @@ -251,12 +251,12 @@ describe("CodeIssueCorrector", () => {
`.repeat(20);
const corrector = new CodeIssueCorrector();
const fakeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: "some sample message",
name: undefined,
};
const fakeSampleCodeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: sampleCodeLong,
name: undefined,
};
Expand Down Expand Up @@ -309,12 +309,12 @@ describe("CodeIssueCorrector", () => {
`.repeat(20);
const corrector = new CodeIssueCorrector();
const fakeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: "some sample message",
name: undefined,
};
const fakeSampleCodeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: sampleCodeLong,
name: undefined,
};
Expand Down Expand Up @@ -360,7 +360,7 @@ describe("CodeIssueCorrector", () => {
it("fixIssueAsync error with code length reduced too much", async () => {
const corrector = new CodeIssueCorrector();
const fakeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: "some sample message",
name: undefined,
};
Expand Down Expand Up @@ -402,7 +402,7 @@ describe("CodeIssueCorrector", () => {
it("fixIssueAsync return newCodeStr", async () => {
const corrector = new CodeIssueCorrector();
const fakeLanguageModelChatSystemMessage: LanguageModelChatMessage = {
role: LanguageModelChatMessageRole.System,
role: LanguageModelChatMessageRole.User,
content: "some sample message",
name: undefined,
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ describe("File: dynamicPrompt/index", () => {
chai
.expect(result.messages[0])
.deep.equal(
new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.System, "test")
new vscode.LanguageModelChatMessage(vscode.LanguageModelChatMessageRole.User, "test")
);
});

Expand Down

0 comments on commit ea46155

Please sign in to comment.