How To: Create a Simple Cartoon Portrait Animation Glif (LivePortrait + Custom Blocks)
This workflow guides you through leveraging the LivePortrait tool in ComfyUI and calling other Glifs!
Remix the existing Glif here:
Step by Step
Portrait Stylizer (IPAdapter + ControlNet)
Click Build to start a new project.
Add an Image Input Block.
Add a Text Input Block.
Add a Glif Block.
Select the pre-loaded GPT Vision Glif.
Set the text prompt, image input, and max tokens for the Vision Glif Block (the values used here are listed below).
Add a Text Combiner Block.
In the Text Combiner Block, reference the text input Block, the Vision Glif output, and any additional prompt text (see the example after the Vision Glif settings below).
Add a ComfyUI Block.
Drop in the first JSON provided below (the Portrait Stylizer workflow).
Check that the image input and text prompt placeholders are formatted to match the names of those Blocks in your glif (see the excerpt after this list).
Publish your glif.
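The ComfyUI Block pulls values from the rest of your glif through curly-brace placeholders in the workflow JSON. In the Portrait Stylizer JSON at the end of this guide, the text prompt enters at node 6 and the image enters at node 20; the excerpt below is a minimal sketch of just those two spots, assuming your combined text arrives as {prompt} and your image input Block is named image-input1 (rename the values inside the braces if your Blocks are named differently):

{
  "6": {
    "inputs": { "text": "{prompt}" },
    "class_type": "CLIPTextEncode"
  },
  "20": {
    "inputs": { "image": "{image-input1}" },
    "class_type": "LoadImage"
  }
}

Edit these fields directly in the full JSON rather than pasting this fragment on its own.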
Live Portrait Animator
Click Build to start a new project.
Add an Image Input Block.
Add a Text Input Block.
Add a Glif Block.
Find the unique ID in the web address of the glif you intend to call from the Glif Block (see the video for more detail).
Paste that ID into the Glif Block.
Adjust the image and text input sections to correspond with your user input blocks.
Add a ComfyUI Block.
Replace the code with the second JSON provided below (the Live Portrait Animator workflow).
Check the image input in the ComfyUI block to make sure it corresponds with your Glif Block name.
(Optional) Swap out the driving video for another that is 30 seconds or less in length (see the snippet after this list).
Publish your glif.
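Two fields in the Live Portrait Animator JSON (the second JSON below) are worth checking before you publish. Node 196 loads the still portrait and uses the placeholder {portraitgenerator}, which is assumed here to be the name of the Glif Block that calls your Portrait Stylizer; change it if your Block is named differently. Node 199 loads the driving video from a URL, so to swap the video (the optional step above), replace that URL with a direct link to your own clip of 30 seconds or less. A minimal sketch of just those two nodes:

{
  "196": {
    "inputs": { "image": "{portraitgenerator}" },
    "class_type": "LoadImage"
  },
  "199": {
    "inputs": { "video": "https://drive.google.com/uc?export=download&id=1jOUIEO2r_i2JZZgKiDJKmURDffHTqyqA" },
    "class_type": "VHS_LoadVideoPath"
  }
}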
textPrompt
We're working for a blind artist who will paint this person or scene for a special occasion - she is capable of painting people or scenes by merely being told a description of what the scene or the person looks like, no more than 10 words, please provide those 10 words. For a person, make sure to include gender, rough age, skin tone and any specific features like hair color or style. For a scene, describe the scene! Do not comma separate them:
imageURL
{image input block name}
maxTokens
50
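The Text Combiner Block from the Portrait Stylizer steps stitches the user's text input, the Vision Glif's description, and any extra style keywords into one prompt. Block references use the same curly-brace syntax as the ComfyUI placeholders; the line below is only an example, assuming Blocks named textinput1 and gptvision (swap in your own Block names, and add or drop style keywords as you like):

{textinput1}, {gptvision}, cartoon portrait, flat colors, simple shapes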
{
"3": {
"inputs": {
"seed": -1,
"steps": 35,
"cfg": 5,
"sampler_name": "dpmpp_2m_sde",
"scheduler": "karras",
"denoise": 0.9500000000000001,
"model": [
"22",
0
],
"positive": [
"24",
0
],
"negative": [
"7",
0
],
"latent_image": [
"5",
0
]
},
"class_type": "KSampler",
"_meta": {
"title": "KSampler"
}
},
"4": {
"inputs": {
"ckpt_name": "sd_xl_base_1.0.safetensors"
},
"class_type": "CheckpointLoaderSimple",
"_meta": {
"title": "Load Checkpoint"
}
},
"5": {
"inputs": {
"width": [
"23",
0
],
"height": [
"23",
1
],
"batch_size": 1
},
"class_type": "EmptyLatentImage",
"_meta": {
"title": "Empty Latent Image"
}
},
"6": {
"inputs": {
"text": "{prompt}",
"clip": [
"28",
1
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"7": {
"inputs": {
"text": "bad, messy, ugly, bad hands, disfigured, weird, photography, photo",
"clip": [
"28",
1
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"8": {
"inputs": {
"samples": [
"3",
0
],
"vae": [
"4",
2
]
},
"class_type": "VAEDecode",
"_meta": {
"title": "VAE Decode"
}
},
"9": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"8",
0
]
},
"class_type": "SaveImage",
"_meta": {
"title": "Save Image"
}
},
"20": {
"inputs": {
"image": "{image-input1}"
},
"class_type": "LoadImage",
"_meta": {
"title": "Load Image"
}
},
"21": {
"inputs": {
"preset": "PLUS FACE (portraits)",
"model": [
"4",
0
]
},
"class_type": "IPAdapterUnifiedLoader",
"_meta": {
"title": "IPAdapter Unified Loader"
}
},
"22": {
"inputs": {
"weight": 0.25,
"start_at": 0.169,
"end_at": 0.916,
"weight_type": "standard",
"model": [
"28",
0
],
"ipadapter": [
"21",
1
],
"image": [
"20",
0
]
},
"class_type": "IPAdapter",
"_meta": {
"title": "IPAdapter"
}
},
"23": {
"inputs": {
"image": [
"20",
0
]
},
"class_type": "SDXLAspectRatio",
"_meta": {
"title": "Image to SDXL compatible WH"
}
},
"24": {
"inputs": {
"strength": 0.5,
"conditioning": [
"6",
0
],
"control_net": [
"26",
0
],
"image": [
"25",
0
]
},
"class_type": "ControlNetApply",
"_meta": {
"title": "Apply ControlNet"
}
},
"25": {
"inputs": {
"image": [
"20",
0
]
},
"class_type": "AnyLinePreprocessor",
"_meta": {
"title": "TheMisto.ai Anyline"
}
},
"26": {
"inputs": {
"control_net_name": "mistoLine_fp16.safetensors"
},
"class_type": "ControlNetLoader",
"_meta": {
"title": "Load ControlNet Model"
}
},
"28": {
"inputs": {
"repo_id": "alvdansen/midsommarcartoon",
"subfolder": "",
"filename": "araminta_k_midsommar_cartoon.safetensors",
"strength_model": 1,
"strength_clip": 1,
"model": [
"4",
0
],
"clip": [
"4",
1
]
},
"class_type": "HFHubLoraLoader",
"_meta": {
"title": "Load HF Lora"
}
}
}
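The JSON above is the Portrait Stylizer workflow; the JSON that follows is the Live Portrait Animator workflow. If the stylized results hew too closely or too loosely to the source photo, three values in the JSON above are the usual dials: denoise in node 3 (how much the sampler repaints the image), weight in node 22 (how strongly the IPAdapter injects the reference portrait), and strength in node 24 (how closely the ControlNet follows the Anyline edges). Shown here only as a reference to where they live, with the defaults used above:

{
  "3": { "inputs": { "denoise": 0.95 } },
  "22": { "inputs": { "weight": 0.25 } },
  "24": { "inputs": { "strength": 0.5 } }
}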
{
"1": {
"inputs": {
"precision": "fp16"
},
"class_type": "DownloadAndLoadLivePortraitModels",
"_meta": {
"title": "(Down)Load LivePortraitModels"
}
},
"78": {
"inputs": {
"image": [
"165",
0
]
},
"class_type": "GetImageSizeAndCount",
"_meta": {
"title": "Get Image Size & Count"
}
},
"81": {
"inputs": {
"expand": 0,
"incremental_expandrate": 0,
"tapered_corners": true,
"flip_input": false,
"blur_radius": 13.9,
"lerp_alpha": 1,
"decay_factor": 1,
"fill_holes": false,
"mask": [
"83",
0
]
},
"class_type": "GrowMaskWithBlur",
"_meta": {
"title": "Grow Mask With Blur"
}
},
"82": {
"inputs": {
"mask": [
"81",
0
]
},
"class_type": "MaskPreview+",
"_meta": {
"title": "π§ Mask Preview"
}
},
"83": {
"inputs": {
"shape": "square",
"frames": 1,
"location_x": 256,
"location_y": 256,
"grow": 0,
"frame_width": 512,
"frame_height": 512,
"shape_width": 480,
"shape_height": 480
},
"class_type": "CreateShapeMask",
"_meta": {
"title": "Create Shape Mask"
}
},
"134": {
"inputs": {
"image": [
"197",
0
]
},
"class_type": "GetImageSizeAndCount",
"_meta": {
"title": "Get Image Size & Count"
}
},
"165": {
"inputs": {
"width": 512,
"height": 512,
"upscale_method": "lanczos",
"keep_proportion": true,
"divisible_by": 2,
"image": [
"196",
0
]
},
"class_type": "ImageResizeKJ",
"_meta": {
"title": "Resize Image"
}
},
"168": {
"inputs": {
"frame_rate": 24,
"loop_count": 1,
"filename_prefix": "LivePortrait",
"format": "video/h264-mp4",
"pingpong": false,
"save_output": false,
"pix_fmt": "yuv420p",
"crf": 19,
"save_metadata": true,
"images": [
"181",
0
]
},
"class_type": "VHS_VideoCombine",
"_meta": {
"title": "Video Combine π₯π
₯π
π
’"
}
},
"180": {
"inputs": {
"eye_retargeting": true,
"eyes_retargeting_multiplier": 1,
"lip_retargeting": true,
"lip_retargeting_multiplier": 1,
"driving_crop_info": [
"197",
1
]
},
"class_type": "LivePortraitRetargeting",
"_meta": {
"title": "LivePortrait Retargeting"
}
},
"181": {
"inputs": {
"image": [
"191",
0
]
},
"class_type": "GetImageSizeAndCount",
"_meta": {
"title": "Get Image Size & Count"
}
},
"182": {
"inputs": {
"image": [
"199",
0
]
},
"class_type": "GetImageSizeAndCount",
"_meta": {
"title": "Get Image Size & Count"
}
},
"189": {
"inputs": {
"dsize": 512,
"scale": 2.3000000000000003,
"vx_ratio": 0,
"vy_ratio": -0.125,
"face_index": 0,
"face_index_order": "large-small",
"rotate": true,
"pipeline": [
"1",
0
],
"cropper": [
"198",
0
],
"source_image": [
"78",
0
]
},
"class_type": "LivePortraitCropper",
"_meta": {
"title": "LivePortrait Cropper"
}
},
"190": {
"inputs": {
"lip_zero": false,
"lip_zero_threshold": 0.03,
"stitching": true,
"delta_multiplier": 1,
"mismatch_method": "constant",
"relative_motion_mode": "relative",
"driving_smooth_observation_variance": 0.000003,
"pipeline": [
"1",
0
],
"crop_info": [
"189",
1
],
"source_image": [
"78",
0
],
"driving_images": [
"182",
0
]
},
"class_type": "LivePortraitProcess",
"_meta": {
"title": "LivePortrait Process"
}
},
"191": {
"inputs": {
"source_image": [
"78",
0
],
"cropped_image": [
"190",
0
],
"liveportrait_out": [
"190",
1
],
"mask": [
"81",
0
]
},
"class_type": "LivePortraitComposite",
"_meta": {
"title": "LivePortrait Composite"
}
},
"196": {
"inputs": {
"image": "{portraitgenerator}"
},
"class_type": "LoadImage",
"_meta": {
"title": "Load Image"
}
},
"197": {
"inputs": {
"dsize": 512,
"scale": 2.3000000000000003,
"vx_ratio": 0,
"vy_ratio": -0.125,
"face_index": 0,
"face_index_order": "large-small",
"rotate": true,
"pipeline": [
"1",
0
],
"cropper": [
"198",
0
],
"source_image": [
"182",
0
]
},
"class_type": "LivePortraitCropper",
"_meta": {
"title": "LivePortrait Cropper"
}
},
"198": {
"inputs": {
"landmarkrunner_onnx_device": "CPU",
"keep_model_loaded": true
},
"class_type": "LivePortraitLoadMediaPipeCropper",
"_meta": {
"title": "LivePortrait Load MediaPipeCropper"
}
},
"199": {
"inputs": {
"video": "https://drive.google.com/uc?export=download&id=1jOUIEO2r_i2JZZgKiDJKmURDffHTqyqA",
"force_rate": 0,
"force_size": "Disabled",
"custom_width": 512,
"custom_height": 512,
"frame_load_cap": 128,
"skip_first_frames": 0,
"select_every_nth": 1
},
"class_type": "VHS_LoadVideoPath",
"_meta": {
"title": "Load Video (Path) π₯π
₯π
π
’"
}
}
}