douwantech / musev
- Public
- 353 runs
Prediction
douwantech/musev:af9bc528919b6ea93e934a0a65b9b6e846bc11c86b2e323d0c0c9d56e92b9496 — ID: an4e312ajsrgm0cg62dtx9ngn8 · Status: Succeeded · Source: Web · Hardware: A100 (40GB) · Total duration · Created · Input
- image_input
- https://general-api.oss-cn-hangzhou.aliyuncs.com/static/2.jpg
{ "image_input": "https://general-api.oss-cn-hangzhou.aliyuncs.com/static/2.jpg" }
Install Replicate’s Node.js client library: npm install replicate
Import and set up the client: import Replicate from "replicate"; import fs from "node:fs"; const replicate = new Replicate({ auth: process.env.REPLICATE_API_TOKEN, });
Run douwantech/musev using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
// Start the prediction and wait for it to finish. `replicate` is the
// authenticated client created above; `image_input` is the only input
// this model takes.
const output = await replicate.run(
  "douwantech/musev:af9bc528919b6ea93e934a0a65b9b6e846bc11c86b2e323d0c0c9d56e92b9496",
  {
    input: {
      image_input: "https://general-api.oss-cn-hangzhou.aliyuncs.com/static/2.jpg",
    },
  },
);

// To access the file URL:
console.log(output.url()); //=> "http://example.com"

// To write the file to disk. Two fixes over the original snippet:
// 1. fs.writeFile (callback API) requires a callback — omitting it makes
//    Node throw "Callback must be a function".
// 2. The model produces a video (the prediction output is result.mp4),
//    so save with an .mp4 extension, not .png.
fs.writeFile("output.mp4", output, (err) => {
  if (err) throw err;
});
To learn more, take a look at the guide on getting started with Node.js.
Install Replicate’s Python client library: pip install replicate
Import the client: import replicate
Run douwantech/musev using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Run the model and block until the prediction completes.
# The only input is the URL of the image to animate.
output = replicate.run(
    "douwantech/musev:af9bc528919b6ea93e934a0a65b9b6e846bc11c86b2e323d0c0c9d56e92b9496",
    input={
        "image_input": "https://general-api.oss-cn-hangzhou.aliyuncs.com/static/2.jpg",
    },
)

# The result is the URL of the generated video file.
print(output)
To learn more, take a look at the guide on getting started with Python.
Run douwantech/musev using Replicate’s API. Check out the model's schema for an overview of inputs and outputs.
# Create a prediction via Replicate's HTTP API. "Prefer: wait" asks the
# server to hold the connection until the prediction finishes; the JSON
# body pins the model version and supplies the single image_input URL.
curl -s -X POST \ -H "Authorization: Bearer $REPLICATE_API_TOKEN" \ -H "Content-Type: application/json" \ -H "Prefer: wait" \ -d $'{ "version": "douwantech/musev:af9bc528919b6ea93e934a0a65b9b6e846bc11c86b2e323d0c0c9d56e92b9496", "input": { "image_input": "https://general-api.oss-cn-hangzhou.aliyuncs.com/static/2.jpg" } }' \ https://api.replicate.com/v1/predictions
To learn more, take a look at Replicate’s HTTP API reference docs.
Output
{ "completed_at": "2024-06-19T13:14:24.032379Z", "created_at": "2024-06-19T13:06:41.942000Z", "data_removed": false, "error": null, "id": "an4e312ajsrgm0cg62dtx9ngn8", "input": { "image_input": "https://general-api.oss-cn-hangzhou.aliyuncs.com/static/2.jpg" }, "logs": "STDOUT: args\n{'add_static_video_prompt': False,\n'context_batch_size': 1,\n'context_frames': 12,\n'context_overlap': 4,\n'context_schedule': 'uniform_v2',\n'context_stride': 1,\n'cross_attention_dim': 768,\n'face_image_path': None,\n'facein_model_cfg_path': '/src/scripts/inference/../.././configs/model/facein.py',\n'facein_model_name': None,\n'facein_scale': 1.0,\n'fix_condition_images': False,\n'fixed_ip_adapter_image': True,\n'fixed_refer_face_image': True,\n'fixed_refer_image': True,\n'fps': 12,\n'guidance_scale': 7.5,\n'height': None,\n'img_length_ratio': 1.0,\n'img_weight': 0.001,\n'interpolation_factor': 1,\n'ip_adapter_face_model_cfg_path': '/src/scripts/inference/../.././configs/model/ip_adapter.py',\n'ip_adapter_face_model_name': None,\n'ip_adapter_face_scale': 1.0,\n'ip_adapter_model_cfg_path': '/src/scripts/inference/../.././configs/model/ip_adapter.py',\n'ip_adapter_model_name': 'musev_referencenet',\n'ip_adapter_scale': 1.0,\n'ipadapter_image_path': None,\n'lcm_model_cfg_path': '/src/scripts/inference/../.././configs/model/lcm_model.py',\n'lcm_model_name': None,\n'log_level': 'INFO',\n'motion_speed': 8.0,\n'n_batch': 1,\n'n_cols': 3,\n'n_repeat': 1,\n'n_vision_condition': 1,\n'need_hist_match': False,\n'need_img_based_video_noise': True,\n'negative_prompt': 'V2',\n'negprompt_cfg_path': '/src/scripts/inference/../../configs/model/negative_prompt.py',\n'noise_type': 'video_fusion',\n'num_inference_steps': 30,\n'output_dir': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f',\n'overwrite': False,\n'prompt_only_use_image_prompt': False,\n'record_mid_video_latents': False,\n'record_mid_video_noises': False,\n'redraw_condition_image': False,\n'redraw_condition_image_with_facein': 
True,\n'redraw_condition_image_with_ip_adapter_face': True,\n'redraw_condition_image_with_ipdapter': True,\n'redraw_condition_image_with_referencenet': True,\n'referencenet_image_path': None,\n'referencenet_model_cfg_path': '/src/scripts/inference/../.././configs/model/referencenet.py',\n'referencenet_model_name': 'musev_referencenet',\n'save_filetype': 'mp4',\n'save_images': False,\n'sd_model_cfg_path': '/src/scripts/inference/../../configs/model/T2I_all_model.py',\n'sd_model_name': 'majicmixRealv6Fp16',\n'seed': None,\n'strength': 0.8,\n'target_datas': '76c3325b-5d15-43c5-802a-b106ddc7514f',\n'test_data_path': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/data.yaml',\n'time_size': 60,\n'unet_model_cfg_path': '/src/scripts/inference/../.././configs/model/motion_model.py',\n'unet_model_name': 'musev_referencenet',\n'use_condition_image': True,\n'vae_model_path': './checkpoints/vae/sd-vae-ft-mse',\n'video_guidance_scale': 3.5,\n'video_guidance_scale_end': None,\n'video_guidance_scale_method': 'linear',\n'video_negative_prompt': 'V2',\n'video_num_inference_steps': 10,\n'video_overlap': 1,\n'vision_clip_extractor_class_name': 'ImageClipVisionFeatureExtractor',\n'vision_clip_model_path': './checkpoints/IP-Adapter/models/image_encoder',\n'w_ind_noise': 0.5,\n'width': None,\n'write_info': False}\nrunning model, T2I SD\n{'majicmixRealv6Fp16': {'sd': '/src/configs/model/../../checkpoints/t2i/sd1.5/majicmixRealv6Fp16'}}\nlcm: None None\nunet_model_params_dict_src dict_keys(['musev', 'musev_referencenet', 'musev_referencenet_pose'])\nunet: musev_referencenet /src/configs/model/../../checkpoints/motion/musev_referencenet\nreferencenet_model_params_dict_src dict_keys(['musev_referencenet'])\nreferencenet: musev_referencenet /src/configs/model/../../checkpoints/motion/musev_referencenet\nip_adapter_model_params_dict_src dict_keys(['IPAdapter', 'IPAdapterPlus', 'IPAdapterPlus-face', 'IPAdapterFaceID', 'musev_referencenet', 'musev_referencenet_pose'])\nip_adapter: 
musev_referencenet {'ip_image_encoder': '/src/configs/model/../../checkpoints/IP-Adapter/image_encoder', 'ip_ckpt': '/src/configs/model/../../checkpoints/motion/musev_referencenet/ip_adapter_image_proj.bin', 'ip_scale': 1.0, 'clip_extra_context_tokens': 4, 'clip_embeddings_dim': 1024, 'desp': ''}\nfacein: None None\nip_adapter_face: None None\nvideo_negprompt V2 badhandv4, ng_deepnegative_v1_75t, (((multiple heads))), (((bad body))), (((two people))), ((extra arms)), ((deformed body)), (((sexy))), paintings,(((two heads))), ((big head)),sketches, (worst quality:2), (low quality:2), (normal quality:2), lowres, ((monochrome)), ((grayscale)), skin spots, acnes, skin blemishes, age spot, glans, (((nsfw))), nipples, extra fingers, (extra legs), (long neck), mutated hands, (fused fingers), (too many fingers)\nnegprompt V2 badhandv4, ng_deepnegative_v1_75t, (((multiple heads))), (((bad body))), (((two people))), ((extra arms)), ((deformed body)), (((sexy))), paintings,(((two heads))), ((big head)),sketches, (worst quality:2), (low quality:2), (normal quality:2), lowres, ((monochrome)), ((grayscale)), skin spots, acnes, skin blemishes, age spot, glans, (((nsfw))), nipples, extra fingers, (extra legs), (long neck), mutated hands, (fused fingers), (too many fingers)\nn_test_datas 1\n2024-06-19 13:11:00,805- musev:882- INFO- vision_clip_extractor, name=ImageClipVisionFeatureExtractor, path=./checkpoints/IP-Adapter/models/image_encoder\ntest_model_vae_model_path ./checkpoints/vae/sd-vae-ft-mse\ni_test_data 0 majicmixRealv6Fp16\n{'condition_images': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/condition_image.jpg',\n'eye_blinks_factor': 1.2,\n'height': 1076,\n'img_length_ratio': 0.957,\n'ipadapter_image': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/condition_image.jpg',\n'name': '76c3325b-5d15-43c5-802a-b106ddc7514f',\n'origin_prompt': '(masterpiece, best quality, highres:1),(human, solo:1),(eye '\n'blinks:1.2),(head wave:1.8)',\n'prompt': '(masterpiece, best 
quality, highres:1),(human, solo:1),(eye '\n'blinks:1.2),(head wave:1.8)',\n'prompt_hash': '046da',\n'refer_image': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/condition_image.jpg',\n'video_path': None,\n'width': 807}\ntest_data_height=1024\ntest_data_width=768\noutput_path /src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/m=majicmixRealv6Fp16_rm=musev_referencenet_case=76c3325b-5d15-43c5-802a-b106ddc7514f_w=768_h=1024_t=60_nb=1_s=16397390_p=046da_w=0.001_ms=8.0_s=0.8_g=3.5_c-i=condi_r-c=False_w=0.5_V2_r=con_ip=con_f=no.mp4\nSave to /src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/m=majicmixRealv6Fp16_rm=musev_referencenet_case=76c3325b-5d15-43c5-802a-b106ddc7514f_w=768_h=1024_t=60_nb=1_s=16397390_p=046da_w=0.001_ms=8.0_s=0.8_g=3.5_c-i=condi_r-c=False_w=0.5_V2_r=con_ip=con_f=no.mp4\nSTDERR: The cache for model files in Transformers v4.22.0 has been updated. Migrating your old cache. This is a one-time only operation. You can interrupt this and resume the migration later on by calling `transformers.utils.move_cache()`.\n0it [00:00, ?it/s]\n0it [00:00, ?it/s]\nKeyword arguments {'torch_device': 'cuda'} are not expected by MusevControlNetPipeline and will be ignored.\nLoading pipeline components...: 0%| | 0/6 [00:00<?, ?it/s]\nLoading pipeline components...: 33%|███▎ | 2/6 [00:00<00:00, 14.90it/s]\nLoading pipeline components...: 67%|██████▋ | 4/6 [00:00<00:00, 6.18it/s]\nLoading pipeline components...: 100%|██████████| 6/6 [00:00<00:00, 10.06it/s]\n0%| | 0/10 [00:00<?, ?it/s]\n10%|█ | 1/10 [00:29<04:26, 29.62s/it]\n20%|██ | 2/10 [00:44<02:48, 21.08s/it]\n30%|███ | 3/10 [00:59<02:08, 18.36s/it]\n40%|████ | 4/10 [01:14<01:42, 17.09s/it]\n50%|█████ | 5/10 [01:30<01:21, 16.38s/it]\n60%|██████ | 6/10 [01:45<01:03, 15.96s/it]\n70%|███████ | 7/10 [02:00<00:47, 15.69s/it]\n80%|████████ | 8/10 [02:15<00:31, 15.51s/it]\n90%|█████████ | 9/10 [02:30<00:15, 15.39s/it]\n100%|██████████| 10/10 [02:45<00:00, 15.31s/it]\n100%|██████████| 10/10 [02:45<00:00, 16.58s/it]", 
"metrics": { "predict_time": 224.686865524, "total_time": 462.090379 }, "output": "https://replicate.delivery/pbxt/ncH7zCUULoppIZtOAs9EaSIw45WxQOsrOcf4bsv23SmXXEgJA/result.mp4", "started_at": "2024-06-19T13:10:39.345513Z", "status": "succeeded", "urls": { "get": "https://api.replicate.com/v1/predictions/an4e312ajsrgm0cg62dtx9ngn8", "cancel": "https://api.replicate.com/v1/predictions/an4e312ajsrgm0cg62dtx9ngn8/cancel" }, "version": "af9bc528919b6ea93e934a0a65b9b6e846bc11c86b2e323d0c0c9d56e92b9496" }
Generated in — STDOUT: args {'add_static_video_prompt': False, 'context_batch_size': 1, 'context_frames': 12, 'context_overlap': 4, 'context_schedule': 'uniform_v2', 'context_stride': 1, 'cross_attention_dim': 768, 'face_image_path': None, 'facein_model_cfg_path': '/src/scripts/inference/../.././configs/model/facein.py', 'facein_model_name': None, 'facein_scale': 1.0, 'fix_condition_images': False, 'fixed_ip_adapter_image': True, 'fixed_refer_face_image': True, 'fixed_refer_image': True, 'fps': 12, 'guidance_scale': 7.5, 'height': None, 'img_length_ratio': 1.0, 'img_weight': 0.001, 'interpolation_factor': 1, 'ip_adapter_face_model_cfg_path': '/src/scripts/inference/../.././configs/model/ip_adapter.py', 'ip_adapter_face_model_name': None, 'ip_adapter_face_scale': 1.0, 'ip_adapter_model_cfg_path': '/src/scripts/inference/../.././configs/model/ip_adapter.py', 'ip_adapter_model_name': 'musev_referencenet', 'ip_adapter_scale': 1.0, 'ipadapter_image_path': None, 'lcm_model_cfg_path': '/src/scripts/inference/../.././configs/model/lcm_model.py', 'lcm_model_name': None, 'log_level': 'INFO', 'motion_speed': 8.0, 'n_batch': 1, 'n_cols': 3, 'n_repeat': 1, 'n_vision_condition': 1, 'need_hist_match': False, 'need_img_based_video_noise': True, 'negative_prompt': 'V2', 'negprompt_cfg_path': '/src/scripts/inference/../../configs/model/negative_prompt.py', 'noise_type': 'video_fusion', 'num_inference_steps': 30, 'output_dir': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f', 'overwrite': False, 'prompt_only_use_image_prompt': False, 'record_mid_video_latents': False, 'record_mid_video_noises': False, 'redraw_condition_image': False, 'redraw_condition_image_with_facein': True, 'redraw_condition_image_with_ip_adapter_face': True, 'redraw_condition_image_with_ipdapter': True, 'redraw_condition_image_with_referencenet': True, 'referencenet_image_path': None, 'referencenet_model_cfg_path': '/src/scripts/inference/../.././configs/model/referencenet.py', 'referencenet_model_name': 
'musev_referencenet', 'save_filetype': 'mp4', 'save_images': False, 'sd_model_cfg_path': '/src/scripts/inference/../../configs/model/T2I_all_model.py', 'sd_model_name': 'majicmixRealv6Fp16', 'seed': None, 'strength': 0.8, 'target_datas': '76c3325b-5d15-43c5-802a-b106ddc7514f', 'test_data_path': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/data.yaml', 'time_size': 60, 'unet_model_cfg_path': '/src/scripts/inference/../.././configs/model/motion_model.py', 'unet_model_name': 'musev_referencenet', 'use_condition_image': True, 'vae_model_path': './checkpoints/vae/sd-vae-ft-mse', 'video_guidance_scale': 3.5, 'video_guidance_scale_end': None, 'video_guidance_scale_method': 'linear', 'video_negative_prompt': 'V2', 'video_num_inference_steps': 10, 'video_overlap': 1, 'vision_clip_extractor_class_name': 'ImageClipVisionFeatureExtractor', 'vision_clip_model_path': './checkpoints/IP-Adapter/models/image_encoder', 'w_ind_noise': 0.5, 'width': None, 'write_info': False} running model, T2I SD {'majicmixRealv6Fp16': {'sd': '/src/configs/model/../../checkpoints/t2i/sd1.5/majicmixRealv6Fp16'}} lcm: None None unet_model_params_dict_src dict_keys(['musev', 'musev_referencenet', 'musev_referencenet_pose']) unet: musev_referencenet /src/configs/model/../../checkpoints/motion/musev_referencenet referencenet_model_params_dict_src dict_keys(['musev_referencenet']) referencenet: musev_referencenet /src/configs/model/../../checkpoints/motion/musev_referencenet ip_adapter_model_params_dict_src dict_keys(['IPAdapter', 'IPAdapterPlus', 'IPAdapterPlus-face', 'IPAdapterFaceID', 'musev_referencenet', 'musev_referencenet_pose']) ip_adapter: musev_referencenet {'ip_image_encoder': '/src/configs/model/../../checkpoints/IP-Adapter/image_encoder', 'ip_ckpt': '/src/configs/model/../../checkpoints/motion/musev_referencenet/ip_adapter_image_proj.bin', 'ip_scale': 1.0, 'clip_extra_context_tokens': 4, 'clip_embeddings_dim': 1024, 'desp': ''} facein: None None ip_adapter_face: None None video_negprompt 
V2 badhandv4, ng_deepnegative_v1_75t, (((multiple heads))), (((bad body))), (((two people))), ((extra arms)), ((deformed body)), (((sexy))), paintings,(((two heads))), ((big head)),sketches, (worst quality:2), (low quality:2), (normal quality:2), lowres, ((monochrome)), ((grayscale)), skin spots, acnes, skin blemishes, age spot, glans, (((nsfw))), nipples, extra fingers, (extra legs), (long neck), mutated hands, (fused fingers), (too many fingers) negprompt V2 badhandv4, ng_deepnegative_v1_75t, (((multiple heads))), (((bad body))), (((two people))), ((extra arms)), ((deformed body)), (((sexy))), paintings,(((two heads))), ((big head)),sketches, (worst quality:2), (low quality:2), (normal quality:2), lowres, ((monochrome)), ((grayscale)), skin spots, acnes, skin blemishes, age spot, glans, (((nsfw))), nipples, extra fingers, (extra legs), (long neck), mutated hands, (fused fingers), (too many fingers) n_test_datas 1 2024-06-19 13:11:00,805- musev:882- INFO- vision_clip_extractor, name=ImageClipVisionFeatureExtractor, path=./checkpoints/IP-Adapter/models/image_encoder test_model_vae_model_path ./checkpoints/vae/sd-vae-ft-mse i_test_data 0 majicmixRealv6Fp16 {'condition_images': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/condition_image.jpg', 'eye_blinks_factor': 1.2, 'height': 1076, 'img_length_ratio': 0.957, 'ipadapter_image': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/condition_image.jpg', 'name': '76c3325b-5d15-43c5-802a-b106ddc7514f', 'origin_prompt': '(masterpiece, best quality, highres:1),(human, solo:1),(eye ' 'blinks:1.2),(head wave:1.8)', 'prompt': '(masterpiece, best quality, highres:1),(human, solo:1),(eye ' 'blinks:1.2),(head wave:1.8)', 'prompt_hash': '046da', 'refer_image': '/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/condition_image.jpg', 'video_path': None, 'width': 807} test_data_height=1024 test_data_width=768 output_path 
/src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/m=majicmixRealv6Fp16_rm=musev_referencenet_case=76c3325b-5d15-43c5-802a-b106ddc7514f_w=768_h=1024_t=60_nb=1_s=16397390_p=046da_w=0.001_ms=8.0_s=0.8_g=3.5_c-i=condi_r-c=False_w=0.5_V2_r=con_ip=con_f=no.mp4 Save to /src/results/76c3325b-5d15-43c5-802a-b106ddc7514f/m=majicmixRealv6Fp16_rm=musev_referencenet_case=76c3325b-5d15-43c5-802a-b106ddc7514f_w=768_h=1024_t=60_nb=1_s=16397390_p=046da_w=0.001_ms=8.0_s=0.8_g=3.5_c-i=condi_r-c=False_w=0.5_V2_r=con_ip=con_f=no.mp4 STDERR: The cache for model files in Transformers v4.22.0 has been updated. Migrating your old cache. This is a one-time only operation. You can interrupt this and resume the migration later on by calling `transformers.utils.move_cache()`. 0it [00:00, ?it/s] 0it [00:00, ?it/s] Keyword arguments {'torch_device': 'cuda'} are not expected by MusevControlNetPipeline and will be ignored. Loading pipeline components...: 0%| | 0/6 [00:00<?, ?it/s] Loading pipeline components...: 33%|███▎ | 2/6 [00:00<00:00, 14.90it/s] Loading pipeline components...: 67%|██████▋ | 4/6 [00:00<00:00, 6.18it/s] Loading pipeline components...: 100%|██████████| 6/6 [00:00<00:00, 10.06it/s] 0%| | 0/10 [00:00<?, ?it/s] 10%|█ | 1/10 [00:29<04:26, 29.62s/it] 20%|██ | 2/10 [00:44<02:48, 21.08s/it] 30%|███ | 3/10 [00:59<02:08, 18.36s/it] 40%|████ | 4/10 [01:14<01:42, 17.09s/it] 50%|█████ | 5/10 [01:30<01:21, 16.38s/it] 60%|██████ | 6/10 [01:45<01:03, 15.96s/it] 70%|███████ | 7/10 [02:00<00:47, 15.69s/it] 80%|████████ | 8/10 [02:15<00:31, 15.51s/it] 90%|█████████ | 9/10 [02:30<00:15, 15.39s/it] 100%|██████████| 10/10 [02:45<00:00, 15.31s/it] 100%|██████████| 10/10 [02:45<00:00, 16.58s/it]
Want to make some of these yourself?
Run this model