Skip to content

Commit 909404f

Browse files
committed
Merge branch 'rvion/controlnet-updates-patches'
2 parents ebf4116 + 95ed988 commit 909404f

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

43 files changed

+1873
-863
lines changed

library/built-in/SDUI.ts

Lines changed: 43 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ app({
4040
model: ui_model(),
4141
latent: ui_latent(),
4242
sampler: ui_sampler(),
43-
highResFix: ui_highresfix(form, { activeByDefault: true }),
43+
highResFix: ui_highresfix({ activeByDefault: true }),
4444
upscale: ui_upscaleWithModel(),
4545
controlnets: ui_cnet(),
4646
recursiveImgToImg: ui_recursive(),
@@ -53,10 +53,7 @@ app({
5353
compressImage: ui_saveAllImages(),
5454
// startImage
5555
removeBG: form.bool({ default: false }),
56-
reversePositiveAndNegative: form.bool({ default: false }),
57-
makeAVideo: form.bool({ default: false }),
58-
summary: form.bool({ default: false }),
59-
gaussianSplat: form.bool({ default: false }),
56+
6057
improveFaces: ui_improveFace(),
6158
show3d: form.groupOpt({
6259
items: () => {
@@ -72,15 +69,23 @@ app({
7269
}
7370
},
7471
}),
72+
testStuff: form.groupOpt({
73+
items: () => ({
74+
reversePositiveAndNegative: form.bool({ default: false }),
75+
makeAVideo: form.bool({ default: false }),
76+
summary: form.bool({ default: false }),
77+
gaussianSplat: form.bool({ default: false }),
78+
}),
79+
}),
7580
}),
7681

7782
run: async (run, ui) => {
7883
const graph = run.nodes
7984
// MODEL, clip skip, vae, etc. ---------------------------------------------------------------
8085
let { ckpt, vae, clip } = run_model(ui.model)
8186

82-
const posPrompt = ui.reversePositiveAndNegative ? ui.negative : ui.positive
83-
const negPrompt = ui.reversePositiveAndNegative ? ui.positive : ui.negative
87+
const posPrompt = ui.testStuff?.reversePositiveAndNegative ? ui.negative : ui.positive
88+
const negPrompt = ui.testStuff?.reversePositiveAndNegative ? ui.positive : ui.negative
8489

8590
// RICH PROMPT ENGINE -------- ---------------------------------------------------------------
8691
const x = run_prompt(run, { richPrompt: posPrompt, clip, ckpt, outputWildcardsPicked: true })
@@ -103,6 +108,7 @@ app({
103108
width,
104109
height,
105110
ckptPos,
111+
modelType: ui.latent.size.modelType,
106112
}
107113
cnet_out = await run_cnet(ui.controlnets, Cnet_args)
108114
positive = cnet_out.cnet_positive
@@ -147,33 +153,40 @@ app({
147153
// }
148154

149155
// SECOND PASS (a.k.a. highres fix) ---------------------------------------------------------
150-
const ctx_sampler_fix: Ctx_sampler = {
151-
ckpt: ckptPos,
152-
clip: clipPos,
153-
vae,
154-
latent,
155-
positive: cnet_out?.post_cnet_positive ?? positive,
156-
negative: cnet_out?.post_cnet_negative ?? negative,
157-
preview: false,
158-
}
159-
if (ui.highResFix) {
160-
if (ui.highResFix.saveIntermediaryImage) {
156+
const HRF = ui.highResFix
157+
if (HRF) {
158+
const ctx_sampler_fix: Ctx_sampler = {
159+
ckpt: ckptPos,
160+
clip: clipPos,
161+
vae,
162+
latent,
163+
positive: cnet_out?.post_cnet_positive ?? positive,
164+
negative: cnet_out?.post_cnet_negative ?? negative,
165+
preview: false,
166+
}
167+
if (HRF.saveIntermediaryImage) {
161168
graph.SaveImage({ images: graph.VAEDecode({ samples: latent, vae }) })
162169
}
163-
latent = graph.LatentUpscale({
164-
samples: latent,
165-
crop: 'disabled',
166-
upscale_method: 'nearest-exact',
167-
height: ui.latent.size.height * ui.highResFix.scaleFactor,
168-
width: ui.latent.size.width * ui.highResFix.scaleFactor,
169-
})
170+
latent = HRF.NNLatentUpscale
171+
? graph.NNLatentUpscale({
172+
latent,
173+
version: ui.latent.size.modelType == 'SDXL 1024' ? 'SDXL' : 'SD 1.x',
174+
upscale: HRF.scaleFactor,
175+
})
176+
: graph.LatentUpscale({
177+
samples: latent,
178+
crop: 'disabled',
179+
upscale_method: 'nearest-exact',
180+
height: ui.latent.size.height * ui.highResFix.scaleFactor,
181+
width: ui.latent.size.width * ui.highResFix.scaleFactor,
182+
})
170183
latent = latent = run_sampler(
171184
run,
172185
{
173186
seed: ui.sampler.seed,
174187
cfg: ui.sampler.cfg,
175-
steps: ui.highResFix.steps,
176-
denoise: ui.highResFix.denoise,
188+
steps: HRF.steps,
189+
denoise: HRF.denoise,
177190
sampler_name: 'ddim',
178191
scheduler: 'ddim_uniform',
179192
},
@@ -233,8 +246,8 @@ app({
233246

234247
await run.PROMPT()
235248

236-
if (ui.gaussianSplat) run.output_GaussianSplat({ url: '' })
237-
if (ui.summary) output_demo_summary(run)
249+
if (ui.testStuff?.gaussianSplat) run.output_GaussianSplat({ url: '' })
250+
if (ui.testStuff?.summary) output_demo_summary(run)
238251
if (show3d) run.output_3dImage({ image: 'base', depth: 'depth', normal: 'normal' })
239252

240253
if (ui.compressImage) {
@@ -251,6 +264,6 @@ app({
251264
}
252265
}
253266

254-
if (ui.makeAVideo) await run.Videos.output_video_ffmpegGeneratedImagesTogether(undefined, 2)
267+
if (ui.testStuff?.makeAVideo) await run.Videos.output_video_ffmpegGeneratedImagesTogether(undefined, 2)
255268
},
256269
})
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
export const ipAdapterDoc = `\
2+
The following table shows the combination of Checkpoint and Image encoder to use for each IPAdapter Model. Any Tensor size mismatch you may get it is likely caused by a wrong combination.
3+
4+
| SD v. | IPadapter | Img encoder | Notes |
5+
|---|---|---|---|
6+
| v1.5 | [ip-adapter_sd15](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15.safetensors) | ViT-H | Basic model, average strength |
7+
| v1.5 | [ip-adapter_sd15_light](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_light.safetensors) | ViT-H | Light model, very light impact |
8+
| v1.5 | [ip-adapter-plus_sd15](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus_sd15.safetensors) | ViT-H | Plus model, very strong |
9+
| v1.5 | [ip-adapter-plus-face_sd15](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus-face_sd15.safetensors) | ViT-H | Face model, use only for faces |
10+
| v1.5 | [ip-adapter-full-face_sd15](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-full-face_sd15.safetensors) | ViT-H | Strongher face model, not necessarily better |
11+
| v1.5 | [ip-adapter_sd15_vit-G](https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_vit-G.safetensors) | ViT-bigG | Base model trained with a bigG encoder |
12+
| SDXL | [ip-adapter_sdxl](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl.safetensors) | ViT-bigG | Base SDXL model, mostly deprecated |
13+
| SDXL | [ip-adapter_sdxl_vit-h](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl_vit-h.safetensors) | ViT-H | New base SDXL model |
14+
| SDXL | [ip-adapter-plus_sdxl_vit-h](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter-plus_sdxl_vit-h.safetensors) | ViT-H | SDXL plus model, stronger |
15+
| SDXL | [ip-adapter-plus-face_sdxl_vit-h](https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter-plus-face_sdxl_vit-h.safetensors) | ViT-H | SDXL face model |
16+
17+
**FaceID** requires \`insightface\`, you need to install them in your ComfyUI environment. Check [this issue](https://github.com/cubiq/ComfyUI_IPAdapter_plus/issues/162) for help.
18+
19+
When the dependencies are satisfied you need:
20+
21+
| SD v. | IPadapter | Img encoder | Lora |
22+
|---|---|---|---|
23+
| v1.5 | [FaceID](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15.bin) | (not used¹) | [FaceID Lora](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15_lora.safetensors) |
24+
| v1.5 | [FaceID Plus](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15.bin) | ViT-H | [FaceID Plus Lora](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15_lora.safetensors) |
25+
| v1.5 | [FaceID Plus v2](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15.bin) | ViT-H | [FaceID Plus v2 Lora](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15_lora.safetensors) |
26+
| SDXL | [FaceID](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl.bin) | (not used¹) | [FaceID SDXL Lora](https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl_lora.safetensors) |
27+
28+
`
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
import type { ComfyUIManagerKnownModelNames } from 'src/wiki/modelListType'
2+
3+
export const ipAdapterModelList: ComfyUIManagerKnownModelNames[] = [
4+
'ip-adapter_sd15.safetensors',
5+
'ip-adapter_sd15_light.safetensors',
6+
'ip-adapter_sd15_vit-G.safetensors',
7+
'ip-adapter-plus_sd15.safetensors',
8+
'ip-adapter-plus-face_sd15.safetensors',
9+
'ip-adapter-full-face_sd15.safetensors',
10+
'ip-adapter_sdxl.safetensors',
11+
'ip-adapter_sdxl_vit-h.safetensors',
12+
'ip-adapter-plus_sdxl_vit-h.safetensors',
13+
'ip-adapter-plus-face_sdxl_vit-h.safetensors',
14+
]
15+
16+
export const ipAdapterClipModelList: ComfyUIManagerKnownModelNames[] = [
17+
//
18+
'ip-adapter-faceid_sd15.bin',
19+
'ip-adapter-faceid-plusv2_sd15.bin',
20+
]
21+
22+
export const ipAdapterFaceIDLoraList: ComfyUIManagerKnownModelNames[] = [
23+
//
24+
'ip-adapter-faceid_sd15_lora.safetensors',
25+
'ip-adapter-faceid-plusv2_sd15_lora.safetensors',
26+
]
Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
import type { ComfyUIManagerKnownModelNames } from 'src/wiki/modelListType'
2+
import type { FormBuilder } from 'src'
3+
import { ipAdapterClipModelList } from './_ipAdapterModelList'
4+
5+
// 🅿️ IPAdapter Common FORM ===================================================
6+
export const ui_subform_IPAdapter_common = (form: FormBuilder, defaultStrength: number = 1) => ({
7+
strength: form.float({ default: defaultStrength, min: 0, max: 2, step: 0.1 }),
8+
crop: form.bool({ default: true }),
9+
advanced: form.groupOpt({
10+
label: 'Advanced',
11+
items: () => ({
12+
startAtStepPercent: form.float({ default: 0, min: 0, max: 1, step: 0.1 }),
13+
endAtStepPercent: form.float({ default: 1, min: 0, max: 1, step: 0.1 }),
14+
noise: form.float({ default: 0, min: 0, max: 1, step: 0.1 }),
15+
unfold_batch: form.bool({ default: false }),
16+
}),
17+
}),
18+
})
19+
20+
//🅿️ IPAdapter CLIP Selection ===================================================
21+
export const ui_ipadapter_CLIPSelection = (form: FormBuilder) => ({
22+
clip_name: form.enum({
23+
enumName: 'Enum_CLIPVisionLoader_clip_name',
24+
default: { value: 'model.safetensors' },
25+
recommandedModels: {
26+
modelFolderPrefix: 'models/clip_vision',
27+
knownModel: ipAdapterClipModelList,
28+
},
29+
// default: 'ip-adapter_sd15.safetensors'
30+
label: 'CLIP Vision Model',
31+
}),
32+
})
33+
34+
//🅿️ IPAdapter Model Selection ===================================================
35+
export const ui_ipadapter_modelSelection = (
36+
form: FormBuilder,
37+
defaultModel: Enum_IPAdapterModelLoader_ipadapter_file = 'ip-adapter_sd15.safetensors',
38+
knownModels: ComfyUIManagerKnownModelNames | ComfyUIManagerKnownModelNames[] | undefined,
39+
) => ({
40+
cnet_model_name: form.enum({
41+
enumName: 'Enum_IPAdapterModelLoader_ipadapter_file',
42+
default: { value: defaultModel },
43+
recommandedModels: {
44+
knownModel: knownModels,
45+
},
46+
// default: 'ip-adapter_sd15.safetensors'
47+
label: 'IP Adapter Model',
48+
}),
49+
})
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
misc notes while reviewing the PR:
2+
3+
- 1. there seems to be a new ip-adapter-faceid-plusv2 that came out yesterday: adding the related models such as 'ip-adapter-faceid-plusv2_sd15.bin' to the recommendation list
4+
5+
- 2. NNLatentUpscale
6+
- Seems to exist in two different custom node packs
7+
- I didn't have it.
8+
- => Recommending both extensions
9+
- => putting it behind a flag in case the custom node is not installed
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
import { OutputFor } from '../../_prefabs'
2+
import { Cnet_args } from '../../prefab_cnet'
3+
import { ipAdapterDoc } from './_ipAdapterDoc'
4+
import { ipAdapterModelList } from './_ipAdapterModelList'
5+
import { ui_ipadapter_CLIPSelection, ui_ipadapter_modelSelection, ui_subform_IPAdapter_common } from './_ipAdapterUtils'
6+
7+
// 🅿️ IPAdapter Basic ===================================================
8+
export const ui_subform_IPAdapter = () => {
9+
const form = getCurrentForm()
10+
return form.group({
11+
label: 'IPAdapter',
12+
customNodesByTitle: ['ComfyUI_IPAdapter_plus'],
13+
items: () => ({
14+
help: form.markdown({ startCollapsed: true, markdown: ipAdapterDoc }),
15+
...ui_ipadapter_CLIPSelection(form),
16+
...ui_ipadapter_modelSelection(form, 'ip-adapter-faceid-plus_sd15.bin' as any, ipAdapterModelList),
17+
...ui_subform_IPAdapter_common(form),
18+
}),
19+
})
20+
}
21+
22+
// 🅿️ IPAdapter RUN ===================================================
23+
export const run_cnet_IPAdapter = (
24+
IPAdapter: OutputFor<typeof ui_subform_IPAdapter>,
25+
cnet_args: Cnet_args,
26+
image: _IMAGE,
27+
): {
28+
ip_adapted_model: _MODEL
29+
} => {
30+
const run = getCurrentRun()
31+
const graph = run.nodes
32+
const ip = IPAdapter
33+
//crop the image to the right size
34+
//todo: make these editable
35+
image = graph.PrepImageForClipVision({
36+
image,
37+
interpolation: 'LANCZOS',
38+
crop_position: 'center',
39+
sharpening: 0,
40+
})._IMAGE
41+
const ip_model = graph.IPAdapterModelLoader({ ipadapter_file: ip.cnet_model_name })
42+
const ip_clip_name = graph.CLIPVisionLoader({ clip_name: ip.clip_name })
43+
const ip_adapted_model = graph.IPAdapterApply({
44+
ipadapter: ip_model,
45+
clip_vision: ip_clip_name,
46+
image: image,
47+
model: cnet_args.ckptPos,
48+
weight: ip.strength,
49+
noise: ip.advanced?.noise ?? 0,
50+
weight_type: 'original',
51+
start_at: ip.advanced?.startAtStepPercent ?? 0,
52+
end_at: ip.advanced?.endAtStepPercent ?? 1,
53+
unfold_batch: ip.advanced?.unfold_batch ?? false,
54+
})._MODEL
55+
56+
return { ip_adapted_model }
57+
}

0 commit comments

Comments
 (0)