Skip to content

Commit

Permalink
Merge pull request #49 from artemptushkin/feature/migrate-to-api-4
Browse files Browse the repository at this point in the history
Migrate to midjourney-api v.4
  • Loading branch information
zcpua committed Sep 12, 2023
2 parents a2becaa + 6984753 commit 31b23f4
Show file tree
Hide file tree
Showing 7 changed files with 1,090 additions and 1,221 deletions.
11 changes: 11 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,17 @@ yarn dev

5. open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

## Build

```bash
# if needed
yarn install --frozen-lockfile

yarn build

docker build -t erictik/midjourney-ui .
```

## Route map

- [x] imagine
Expand Down
1 change: 1 addition & 0 deletions interfaces/message.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
export interface Message {
text: string;
img: string;
flags?: number;
msgID?: string;
msgHash?: string;
content?: string;
Expand Down
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@
"@ant-design/pro-layout": "^7.10.3",
"@vercel/analytics": "^1.0.1",
"eventsource-parser": "^1.0.0",
"midjourney": "^2.7.77",
"next": "^13.4.4"
"midjourney": "^4.3.11",
"next": "^13.4.13"
},
"keywords": [
"midjourney-api",
Expand Down
95 changes: 50 additions & 45 deletions pages/api/upscale.ts
Original file line number Diff line number Diff line change
@@ -1,51 +1,56 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import { Midjourney } from "midjourney";
import { ResponseError } from "../../interfaces";
import {Midjourney} from "midjourney";
import {ResponseError} from "../../interfaces";
import {Upscale} from "../../request";

export const config = {
runtime: "edge",
runtime: "edge",
};

// Edge API route: upscales one image from a previous /imagine result and
// streams progress back to the client as concatenated JSON chunks.
export default async function handler(req: Request) {
  // `flags` is required by midjourney-api v4 alongside the message id/hash.
  const { content, index, msgId, msgHash, flags } = await req.json();
  console.log("upscale.handler", content);
  const client = new Midjourney({
    ServerId: <string>process.env.SERVER_ID,
    ChannelId: <string>process.env.CHANNEL_ID,
    SalaiToken: <string>process.env.SALAI_TOKEN,
    HuggingFaceToken: <string>process.env.HUGGINGFACE_TOKEN,
    Debug: true,
    Ws: process.env.WS === "true",
  });
  await client.init();
  const encoder = new TextEncoder();
  const readable = new ReadableStream({
    start(controller) {
      console.log("upscale.start", content);
      client
        .Upscale({
          index,
          msgId,
          hash: msgHash,
          content,
          flags,
          // BUG FIX: the callback was previously spread into the options
          // object (`...(uri, progress) => {…}`). Spreading a function copies
          // its own enumerable properties — there are none — so the callback
          // was silently discarded and no progress ever streamed. In
          // midjourney-api v4 the progress callback is the `loading` option.
          loading: (uri: string, progress: string) => {
            console.log("upscale.loading", uri);
            controller.enqueue(
              encoder.encode(JSON.stringify({ uri, progress }))
            );
          },
        })
        .then((msg) => {
          console.log("upscale.done", msg);
          // Final message is the last JSON chunk in the stream.
          controller.enqueue(encoder.encode(JSON.stringify(msg)));
          client.Close();
          controller.close();
        })
        .catch((err: ResponseError) => {
          // NOTE(review): error is logged but not surfaced to the client —
          // the stream just closes. Consider enqueueing an error payload.
          console.log("upscale.error", err);
          client.Close();
          controller.close();
        });
    },
  });
  return new Response(readable, {});
}
25 changes: 14 additions & 11 deletions pages/api/variation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ export const config = {
runtime: "edge",
};
export default async function handler(req: Request) {
const { content, index, msgId, msgHash } = await req.json();
const { content, index, msgId, msgHash, flags } = await req.json();
console.log("variation.handler", content);
const client = new Midjourney({
ServerId: <string>process.env.SERVER_ID,
Expand All @@ -22,16 +22,19 @@ export default async function handler(req: Request) {
console.log("variation.start", content);
client
.Variation(
content,
index,
msgId,
msgHash,
(uri: string, progress: string) => {
console.log("variation.loading", uri);
controller.enqueue(
encoder.encode(JSON.stringify({ uri, progress }))
);
}
{
index,
msgId,
hash: msgHash,
content,
flags,
...(uri: string, progress: string) => {
console.log("variation.loading", uri);
controller.enqueue(
encoder.encode(JSON.stringify({ uri, progress }))
);
}
}
)
.then((msg) => {
console.log("variation.done", msg);
Expand Down
18 changes: 10 additions & 8 deletions pages/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ const Index: React.FC = () => {
newMessage.msgID = data.id;
newMessage.progress = data.progress;
newMessage.content = data.content;
newMessage.flags = data.flags;
setMessages([...oldMessages, newMessage]);
}
);
Expand Down Expand Up @@ -70,6 +71,7 @@ const Index: React.FC = () => {
newMessage.msgID = data.id;
newMessage.content = data.content;
newMessage.progress = data.progress;
newMessage.flags = data.flags;
setMessages([...oldMessages, newMessage]);
}
);
Expand Down Expand Up @@ -102,6 +104,7 @@ const Index: React.FC = () => {
newMessage.msgID = data.id;
newMessage.content = data.content;
newMessage.progress = data.progress;
newMessage.flags = data.flags;
setMessages([...oldMessages, newMessage]);
}
);
Expand Down Expand Up @@ -192,14 +195,13 @@ const Index: React.FC = () => {

return (
<div className="w-full mx-auto px-4 h-full overflow-y-hidden">
<List
className=" mx-auto xl:w-3/5 w-4/5 justify-start overflow-y-auto"
style={{
height: "calc(100vh - 96px)",
}}
dataSource={messages}
renderItem={renderMessage}
/>
<List
style={{
height: "calc(100vh - 96px)",
}}
dataSource={messages}
renderItem={renderMessage}
/>
<div className="absolute z-10 w-3/4 xl:w-3/5 right-0 bottom-10 left-0 mx-auto ">
<TextArea
className="w-full"
Expand Down
Loading

1 comment on commit 31b23f4

@vercel
Copy link

@vercel vercel bot commented on 31b23f4 Sep 12, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.