Skip to content

Commit

Permalink
add SiliconflowLLM
Browse files Browse the repository at this point in the history
  • Loading branch information
shadowcz007 committed Jul 29, 2024
1 parent db1e695 commit 35492c5
Show file tree
Hide file tree
Showing 6 changed files with 214 additions and 12 deletions.
11 changes: 6 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,14 @@

##### `最新`

- 增加 Edit Mask,方便在生成的时候手动绘制 mask [workflow](./workflow/edit-mask-workflow.json)

- 增加 SiliconflowLLM,可以使用由Siliconflow提供的免费LLM

- ChatGPT 节点支持 Local LLM(llama.cpp),Phi3、llama3 都可以直接一个节点运行了。模型下载后,放置到 `models/llamafile/`
- 增加 Edit Mask,方便在生成的时候手动绘制 mask [workflow](./workflow/edit-mask-workflow.json)

- 右键菜单支持 text-to-text,方便对 prompt 词补全
<!-- - ChatGPT 节点支持 Local LLM(llama.cpp),Phi3、llama3 都可以直接一个节点运行了。模型下载后,放置到 `models/llamafile/` -->

<!-- - 右键菜单支持 text-to-text,方便对 prompt 词补全 -->
<!--
强烈推荐:
[Phi-3-mini-4k-instruct-function-calling-GGUF](https://huggingface.co/nold/Phi-3-mini-4k-instruct-function-calling-GGUF)
Expand All @@ -21,7 +22,7 @@
- 右键菜单支持 image-to-text,使用多模态模型,多模态使用 [llava-phi-3-mini-gguf](https://huggingface.co/xtuner/llava-phi-3-mini-gguf/tree/main),注意需要把llava-phi-3-mini-mmproj-f16.gguf也下载
![](./assets/prompt_ai_setup.png)
![](./assets/prompt-ai.png)
![](./assets/prompt-ai.png) -->


#### `相关插件推荐`
Expand Down
4 changes: 3 additions & 1 deletion __init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1147,11 +1147,12 @@ def re_start(request):
# print('\033[91m ### Mixlab Nodes: \033[93mLoaded')

try:
from .nodes.ChatGPT import ChatGPTNode,ShowTextForGPT,CharacterInText,TextSplitByDelimiter
from .nodes.ChatGPT import ChatGPTNode,ShowTextForGPT,CharacterInText,TextSplitByDelimiter,SiliconflowFreeNode
logging.info('ChatGPT.available True')

NODE_CLASS_MAPPINGS_V = {
"ChatGPTOpenAI":ChatGPTNode,
"SiliconflowLLM":SiliconflowFreeNode,
"ShowTextForGPT":ShowTextForGPT,
"CharacterInText":CharacterInText,
"TextSplitByDelimiter":TextSplitByDelimiter,
Expand All @@ -1160,6 +1161,7 @@ def re_start(request):
# 一个包含节点友好/可读的标题的字典
NODE_DISPLAY_NAME_MAPPINGS_V = {
"ChatGPTOpenAI":"ChatGPT & Local LLM ♾️Mixlab",
"SiliconflowLLM":"LLM Siliconflow ♾️Mixlab",
"ShowTextForGPT":"Show Text ♾️MixlabApp",
"CharacterInText":"Character In Text",
"TextSplitByDelimiter":"Text Split By Delimiter",
Expand Down
94 changes: 90 additions & 4 deletions nodes/ChatGPT.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ def azure_client(key,url):

def openai_client(key, url):
    """Create an OpenAI-compatible API client.

    Used both for the real OpenAI endpoint and for any service exposing an
    OpenAI-compatible REST API (e.g. Siliconflow).

    Args:
        key: API key sent as the bearer token.
        url: Base URL of the OpenAI-compatible endpoint.

    Returns:
        An ``openai.OpenAI`` client instance.
    """
    # NOTE(review): the scraped diff showed the api_key/base_url pair twice
    # (removed + added lines); a literal read would be duplicate keyword
    # arguments. Keep exactly one pair — the post-commit state.
    client = openai.OpenAI(
        api_key=key,
        base_url=url
    )
    return client

Expand Down Expand Up @@ -163,7 +163,7 @@ def llama_cpp_client(file_name):


def chat(client, model_name,messages ):

print('#chat',model_name,messages)
try_count = 0
while True:
try_count += 1
Expand Down Expand Up @@ -304,7 +304,7 @@ def generate_contextual_text(self,
client=llama_cpp_client(model)
else :
client = openai_client(api_key,api_url) # 使用 ChatGPT 的接口
print('using ChatGPT interface')
# print('using ChatGPT interface',api_key,api_url)

# 把用户的提示添加到会话历史中
# 调用API时传递整个会话历史
Expand All @@ -320,6 +320,7 @@ def crop_list_tail(lst, size):
session_history=crop_list_tail(self.session_history,context_size)

messages=[{"role": "system", "content": self.system_content}]+session_history+[{"role": "user", "content": prompt}]

response_content = chat(client,model,messages)

self.session_history=self.session_history+[{"role": "user", "content": prompt}]+[{'role':'assistant',"content":response_content}]
Expand All @@ -340,6 +341,91 @@ def crop_list_tail(lst, size):
return (response_content,json.dumps(messages, indent=4),json.dumps(self.session_history, indent=4),)


class SiliconflowFreeNode:
    """ComfyUI node that chats with the free LLMs hosted by Siliconflow.

    Siliconflow exposes an OpenAI-compatible endpoint, so the node reuses the
    same ``openai_client``/``chat`` helpers as the ChatGPT node while keeping
    its own rolling conversation history on the instance.
    """

    def __init__(self):
        # Rolling conversation history: list of {"role": ..., "content": ...}.
        self.session_history = []
        # Default system prompt; replaced whenever the user supplies one.
        self.system_content = "You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible."

    @classmethod
    def INPUT_TYPES(cls):
        # Free models currently offered by Siliconflow.
        models = [
            "Qwen/Qwen2-7B-Instruct",
            "THUDM/glm-4-9b-chat",
            "01-ai/Yi-1.5-9B-Chat-16K",
            "meta-llama/Meta-Llama-3.1-8B-Instruct"
        ]
        return {
            "required": {
                "api_key": ("KEY", {"default": "", "multiline": True, "dynamicPrompts": False}),
                "prompt": ("STRING", {"multiline": True, "dynamicPrompts": False}),
                "system_content": ("STRING",
                                   {
                                       "default": "You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible.",
                                       "multiline": True, "dynamicPrompts": False
                                   }),
                "model": (models,
                          {"default": models[0]}),
                # seed exists so ComfyUI re-runs the node on demand; the value
                # itself is not forwarded to the API.
                "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff, "step": 1}),
                # How many past turns to resend with each request.
                "context_size": ("INT", {"default": 1, "min": 0, "max": 30, "step": 1}),
            },
            "hidden": {
                "unique_id": "UNIQUE_ID",
                "extra_pnginfo": "EXTRA_PNGINFO",
            },
        }

    RETURN_TYPES = ("STRING", "STRING", "STRING",)
    RETURN_NAMES = ("text", "messages", "session_history",)
    FUNCTION = "generate_contextual_text"
    CATEGORY = "♾️Mixlab/GPT"
    INPUT_IS_LIST = False
    OUTPUT_IS_LIST = (False, False, False,)

    def generate_contextual_text(self,
                                 api_key,
                                 prompt,
                                 system_content,
                                 model,
                                 seed, context_size, unique_id=None, extra_pnginfo=None):
        """Send *prompt* plus the trimmed history to Siliconflow; return the reply.

        Returns a 3-tuple: (assistant reply, JSON of the request messages,
        JSON of the full session history).
        """
        api_url = "https://api.siliconflow.cn/v1"

        # A non-empty system prompt overrides the stored one for this and
        # all subsequent calls.
        if system_content:
            self.system_content = system_content

        # OpenAI-compatible client pointed at the Siliconflow endpoint.
        client = openai_client(api_key, api_url)

        def tail(items, size):
            # Last *size* entries; everything when size covers the list,
            # nothing when size is 0.
            if size >= len(items):
                return items
            return items[-size:] if size else []

        recent = tail(self.session_history, context_size)

        messages = ([{"role": "system", "content": self.system_content}]
                    + recent
                    + [{"role": "user", "content": prompt}])

        response_content = chat(client, model, messages)

        # Append this exchange to the persistent history.
        self.session_history = (self.session_history
                                + [{"role": "user", "content": prompt}]
                                + [{'role': 'assistant', "content": response_content}])

        return (response_content,
                json.dumps(messages, indent=4),
                json.dumps(self.session_history, indent=4),)




class ShowTextForGPT:
@classmethod
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[project]
name = "comfyui-mixlab-nodes"
description = "3D, ScreenShareNode & FloatingVideoNode, SpeechRecognition & SpeechSynthesis, GPT, LoadImagesFromLocal, Layers, Other Nodes, ..."
version = "0.31.1"
version = "0.32.0"
license = "MIT"
dependencies = ["numpy", "pyOpenSSL", "watchdog", "opencv-python-headless", "matplotlib", "openai", "simple-lama-inpainting", "clip-interrogator==0.6.0", "transformers>=4.36.0", "lark-parser", "imageio-ffmpeg", "rembg[gpu]", "omegaconf==2.3.0", "Pillow>=9.5.0", "einops==0.7.0", "trimesh>=4.0.5", "huggingface-hub", "scikit-image"]

Expand Down
1 change: 0 additions & 1 deletion web/javascript/app_mixlab.js
Original file line number Diff line number Diff line change
Expand Up @@ -466,7 +466,6 @@ app.registerExtension({
const { input, output } = getInputsAndOutputs()
input_ids.value = input.join('\n')
output_ids.value = output.join('\n')

const widget = {
type: 'div',
name: 'AppInfoRun',
Expand Down
114 changes: 114 additions & 0 deletions web/javascript/gpt_mixlab.js
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,120 @@ app.registerExtension({
url[id] || 'https://api.openai.com/v1'
}
}
});


// Registers the SiliconflowLLM node's front-end: a custom "KEY" widget type
// for the api_key input, plus a DOM overlay that lets the user type the
// Siliconflow API key, persisted in localStorage under '_mixlab_api_key'.
app.registerExtension({
  name: 'Mixlab.GPT.SiliconflowLLM',

  // Supply the custom widget implementation for inputs declared as "KEY".
  async getCustomWidgets (app) {
    return {
      KEY (node, inputName, inputData, app) {
        // console.log('##inputData', inputData)
        const widget = {
          type: inputData[0], // the type, CHEESE
          name: inputName, // the name, slice
          size: [128, 32], // a default size
          draw (ctx, node, width, y) {}, // nothing drawn on the node canvas itself
          computeSize (...args) {
            return [128, 32] // a method to compute the current size of the widget
          },
          // The value saved into the workflow comes from localStorage,
          // keyed by node id; falls back to the placeholder 'by Mixlab'.
          async serializeValue (nodeId, widgetIndex) {
            let data = getLocalData('_mixlab_api_key')
            return data[node.id] || 'by Mixlab'
          }
        }
        // widget.something = something; // maybe adds stuff to it
        node.addCustomWidget(widget) // adds it to the node
        return widget // and returns it.
      },
    }
  },

  async beforeRegisterNodeDef (nodeType, nodeData, app) {
    if (nodeType.comfyClass == 'SiliconflowLLM') {
      // Wrap onNodeCreated so the original behavior is preserved.
      const orig_nodeCreated = nodeType.prototype.onNodeCreated
      nodeType.prototype.onNodeCreated = function () {
        orig_nodeCreated?.apply(this, arguments)

        const api_key = this.widgets.filter(w => w.name == 'api_key')[0]

        // DOM-backed widget whose div is repositioned over the node's
        // api_key row on every draw.
        const widget = {
          type: 'div',
          name: 'chatgptdiv',
          draw (ctx, node, widget_width, y, widget_height) {
            Object.assign(
              this.div.style,
              get_position_style(ctx, widget_width, api_key.y, node.size[1])
            )
          }
        }

        widget.div = $el('div', {})

        document.body.appendChild(widget.div)

        // Build a label+input row; inputs labeled 'Key' render as password
        // fields. Edits are written straight to localStorage keyed by node id.
        const inputDiv = (key, placeholder) => {
          let div = document.createElement('div')
          const ip = document.createElement('input')
          ip.type = placeholder === 'Key' ? 'password' : 'text'
          ip.className = `${'comfy-multiline-input'} ${placeholder}`
          div.style = `display: flex;
            align-items: center;
            margin: 6px 8px;
            margin-top: 0;`
          ip.placeholder = placeholder
          ip.value = placeholder

          ip.style = `margin-left: 24px;
            outline: none;
            border: none;
            padding: 4px;width: 100%;`
          const label = document.createElement('label')
          label.style = 'font-size: 10px;min-width:32px'
          label.innerText = placeholder
          div.appendChild(label)
          div.appendChild(ip)

          ip.addEventListener('change', () => {
            let data = getLocalData(key)
            data[this.id] = ip.value.trim()
            localStorage.setItem(key, JSON.stringify(data))
            console.log(this.id, key)
          })
          return div
        }

        let inputKey = inputDiv('_mixlab_api_key', 'Key')

        widget.div.appendChild(inputKey)

        this.addCustomWidget(widget)

        // Clean up the detached DOM nodes when the node is removed.
        const onRemoved = this.onRemoved
        this.onRemoved = () => {
          inputKey.remove()
          widget.div.remove()
          return onRemoved?.()
        }

        this.serialize_widgets = true // widget values must be saved with the workflow
      }
    }
  },

  async loadedGraphNode (node, app) {
    // Fires every time a node is constructed
    // You can modify widgets/add handlers/etc here

    // Restore the stored key into the password input when a saved
    // workflow is loaded.
    if (node.type === 'SiliconflowLLM') {
      let widget = node.widgets.filter(w => w.div)[0]

      let apiKey = getLocalData('_mixlab_api_key');

      let id = node.id

      widget.div.querySelector('.Key').value = apiKey[id] || 'by Mixlab'
    }
  }
})

app.registerExtension({
Expand Down

0 comments on commit 35492c5

Please sign in to comment.