Alibaba TongYi Vision Intelligence Lab (ali-vilab)

https://github.com/ali-vilab

The Vision Lab is dedicated to developing computer vision technologies that perceive, understand, produce, and process image and video content, and that generate and reconstruct 3D scenes and objects. The lab provides technical support for services and applications that use images and videos to help customers identify business opportunities across a wide range of industries, such as new retail, new media, and new manufacturing. These services and applications have been widely adopted in the interactive entertainment, digital intelligence education, and offline intelligence sectors.

\n","classNames":"hf-sanitized hf-sanitized-R1kAl66cV04pj1admgvDw"},"users":[{"_id":"623c6253389748c9f72ca287","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/1654828369523-623c6253389748c9f72ca287.jpeg","isPro":false,"fullname":"wenmeng zhou","user":"wenmengzhou","type":"user"},{"_id":"62fc99cccad078c79728fbed","avatarUrl":"/avatars/76cb87f0a2e9b3de41e3f1b18ab7cb2f.svg","isPro":false,"fullname":"liuyuyuil","user":"liuyuyuil","type":"user"},{"_id":"64ef0730c0778dbdc337c63d","avatarUrl":"/avatars/046f28c9bc9a752253824bb78bae1b78.svg","isPro":false,"fullname":"zhang","user":"danielzyyyyy","type":"user"},{"_id":"6522cf31777019ca30d85725","avatarUrl":"/avatars/a180b096e438e429d445b68fe703e43f.svg","isPro":false,"fullname":"Lianghua Huang","user":"lhhuang","type":"user"},{"_id":"66d98c40dc8d2111492954f6","avatarUrl":"/avatars/c58392abce0b9b1152cc783b142b8061.svg","isPro":false,"fullname":"Chen Liang","user":"JasiLiang","type":"user"},{"_id":"6458970cab9a44f42f620a80","avatarUrl":"/avatars/f9779b0621c931f922440fec95342444.svg","isPro":false,"fullname":"chaojie mao","user":"chaojiemao","type":"user"},{"_id":"643d278b482011f5f2bd0fae","avatarUrl":"/avatars/70b8a7ffbfa2a1c4b6f5ff5e2b96b7bf.svg","isPro":false,"fullname":"jiangzeyinzi","user":"jiangzeyinzi","type":"user"},{"_id":"647ffabf28b737d7b9462eb2","avatarUrl":"/avatars/210441fc6645d08b36ad43734108f914.svg","isPro":false,"fullname":"Zhen Han","user":"hanzhn","type":"user"},{"_id":"65a9e000cab1bfcc3af8cfe2","avatarUrl":"/avatars/ddd0062dbeb5eb976841cec8a63c64a6.svg","isPro":false,"fullname":"Yulin Pan","user":"pan-yl","type":"user"}],"userCount":9,"collections":[{"slug":"ali-vilab/vace-67eca186ff3e3564726aff38","title":"VACE","description":"VACE: All-in-One Video Creation and Editing","gating":false,"lastUpdated":"2025-05-15T02:56:23.959Z","owner":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"items":[{"_id":"67ecab8729b5bc34cda277e6","position":0,"type":"paper","id":"2503.07598","title":"VACE: All-in-One Video Creation and 
Editing","thumbnailUrl":"https://cdn-thumbnails.huggingface.co/social-thumbnails/papers/2503.07598.png","upvotes":56,"publishedAt":"2025-03-10T17:57:04.000Z","isUpvotedByUser":false},{"_id":"68254e7023d760cc451c6a12","position":1,"type":"model","author":"Wan-AI","authorData":{"_id":"67bc7cd418dd753c02a82684","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/67b610677ea7952def8b29c6/N6jQbbeaa_FcUY-wI1dgG.png","fullname":"Wan-AI","name":"Wan-AI","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":8444,"isUserFollowing":false},"downloads":3479,"gated":false,"id":"Wan-AI/Wan2.1-VACE-14B","availableInferenceProviders":[],"lastModified":"2025-05-19T03:16:32.000Z","likes":490,"pipeline_tag":"image-to-video","private":false,"repoType":"model","isLikedByUser":false},{"_id":"68254e7dae455f19808d9796","position":2,"type":"model","author":"Wan-AI","authorData":{"_id":"67bc7cd418dd753c02a82684","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/67b610677ea7952def8b29c6/N6jQbbeaa_FcUY-wI1dgG.png","fullname":"Wan-AI","name":"Wan-AI","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":8444,"isUserFollowing":false},"downloads":1441,"gated":false,"id":"Wan-AI/Wan2.1-VACE-1.3B","availableInferenceProviders":[],"lastModified":"2025-05-19T03:18:05.000Z","likes":124,"pipeline_tag":"image-to-video","private":false,"repoType":"model","isLikedByUser":false},{"_id":"67eca1b2351721d62aa4f966","position":3,"type":"model","author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":4025,"gated":false,"id":"ali-vilab/VACE-Wan2.1-1.3B-Preview","availableInferenceProviders":[],"lastModified":"2025-05-19T03:19:58.000Z","likes":126,"pipeline_tag":"image-to-video","private":false,"repoType":"model","isLikedByUser":false}],"position":0,"theme":"indigo","private":false,"shareUrl":"https://hf.co/collections/ali-vilab/vace","upvotes":34,"isUpvotedByUser":false}],"datasets":[{"author":"ali-vilab","downloads":1717,"gated":false,"id":"ali-vilab/VACE-Benchmark","lastModified":"2025-10-17T06:51:16.000Z","datasetsServerInfo":{"viewer":"viewer","numRows":696,"libraries":["datasets","mlcroissant"],"formats":[],"modalities":["video"]},"private":false,"repoType":"dataset","likes":20,"isLikedByUser":false,"isBenchmark":false},{"author":"ali-vilab","downloads":100,"gated":false,"id":"ali-vilab/ICE-Bench","lastModified":"2025-08-27T09:46:37.000Z","datasetsServerInfo":{"viewer":"preview","numRows":0,"libraries":[],"formats":[],"modalities":["image"]},"private":false,"repoType":"dataset","likes":1,"isLikedByUser":false,"isBenchmark":false},{"author":"ali-vilab","downloads":171,"gated":false,"id":"ali-vilab/IDEA-Bench","lastModified":"2025-01-08T06:25:57.000Z","private":false,"repoType":"dataset","likes":11,"isLikedByUser":false,"isBenchmark":false}],"models":[{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":3,"gated":false,"id":"ali-vilab/Unison","availableInferenceProviders":[],"lastModi
fied":"2025-12-07T03:24:38.000Z","likes":2,"private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":4025,"gated":false,"id":"ali-vilab/VACE-Wan2.1-1.3B-Preview","availableInferenceProviders":[],"lastModified":"2025-05-19T03:19:58.000Z","likes":126,"pipeline_tag":"image-to-video","private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":131,"gated":false,"id":"ali-vilab/VACE-Annotators","availableInferenceProviders":[],"lastModified":"2025-05-15T06:53:58.000Z","likes":26,"private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":17,"gated":false,"id":"ali-vilab/VACE-LTX-Video-0.9","availableInferenceProviders":[],"lastModified":"2025-04-02T03:17:57.000Z","likes":28,"private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":22,"gated":false,"id":"ali-vilab/ACE_Plus","availableInferenceProviders":[],"lastModified":"2025-03-14T22:14:34.000Z","likes":300,"private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":1407,"gated":false,"id":"ali-vilab/In-Context-LoRA","availableInferenceProviders":[{"provider":"fal-ai","modelStatus":"live","providerStatus":"live","providerId":"fal-ai/flux-lora","task":"text-to-image","adapterType":"lora","isCheapestPricingOutput":false,"isFastestThroughput":false,"isModelAuthor":false},{"provider":"wavespeed","modelStatus":"live","providerStatus":"live","providerId":"wavespeed-ai/flux-dev-lora","task":"text-to-image","adapterType":"lora","isCheapestPricingOutput":false,"isFastestThroughput":false,"isModelAuthor":false},{"provider":"replicate","modelStatus":"live","providerStatus":"live","providerId":"black-forest-labs/flux-dev-lora","task":"text-to-image","adapterType":"lora","isCheapestPricingOutput":false,"isFastestThroughput":false,"isModelAuthor":false}],"lastModified":"2024-12-17T06:13:20.000Z","likes":633,"pipeline_tag":"text-to-image","private":false,"repo
Type":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":1064,"gated":false,"id":"ali-vilab/i2vgen-xl","availableInferenceProviders":[],"lastModified":"2024-02-09T13:57:25.000Z","likes":181,"pipeline_tag":"text-to-video","private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":11350,"gated":false,"id":"ali-vilab/text-to-video-ms-1.7b","availableInferenceProviders":[],"lastModified":"2023-12-01T07:52:12.000Z","likes":651,"pipeline_tag":"text-to-video","private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":2,"gated":false,"id":"ali-vilab/MS-Vid2Vid-XL","availableInferenceProviders":[],"lastModified":"2023-09-04T14:36:05.000Z","likes":55,"private":false,"repoType":"model","isLikedByUser":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"downloads":12,"gated":false,"id":"ali-vilab/MS-Image2Video","availableInferenceProviders":[],"lastModified":"2023-09-04T08:51:11.000Z","likes":118,"private":false,"repoType":"model","isLikedByUser":false}],"paperPreviews":[],"spaces":[{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"colorFrom":"blue","colorTo":"green","createdAt":"2024-12-27T06:42:48.000Z","emoji":"📉","id":"ali-vilab/IDEA-Bench-Arena","lastModified":"2025-02-20T02:37:13.000Z","likes":6,"pinned":false,"private":false,"sdk":"gradio","repoType":"space","runtime":{"stage":"RUNNING","hardware":{"current":"cpu-basic","requested":"cpu-basic"},"storage":null,"gcTimeout":172800,"replicas":{"current":1,"requested":1},"devMode":false,"domains":[{"domain":"ali-vilab-idea-bench-arena.hf.space","stage":"READY"}],"sha":"8f50b5d99acbaff12de6c131a76c03da2dedfeec"},"shortDescription":"Official arena of IDEA-Bench.","title":"IDEA Bench Arena","isLikedByUser":false,"ai_short_description":"Generate and compare images using different models","ai_category":"Image 
Generation","trendingScore":0,"tags":["gradio","region:us"],"featured":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"colorFrom":"indigo","colorTo":"purple","createdAt":"2023-08-25T05:03:22.000Z","emoji":"🔥","id":"ali-vilab/MS-Vid2Vid-XL-demo","lastModified":"2023-09-29T08:07:19.000Z","likes":23,"pinned":false,"private":false,"sdk":"docker","repoType":"space","runtime":{"stage":"RUNTIME_ERROR","hardware":{"current":null,"requested":"a10g-large"},"storage":null,"gcTimeout":3600,"errorMessage":"Exit code: 1. Reason: 5.65G [02:13<00:03, 43.6MB/s]\u001b[A\n\rnon_ema_0035000.pth: 98%|█████████▊| 5.53G/5.65G [02:14<00:02, 42.0MB/s]\u001b[A\n\rnon_ema_0035000.pth: 98%|█████████▊| 5.54G/5.65G [02:15<00:03, 32.5MB/s]\u001b[A\n\rnon_ema_0035000.pth: 98%|█████████▊| 5.56G/5.65G [02:15<00:02, 35.5MB/s]\u001b[A\n\rnon_ema_0035000.pth: 99%|█████████▊| 5.57G/5.65G [02:15<00:01, 39.0MB/s]\u001b[A\n\rnon_ema_0035000.pth: 99%|█████████▉| 5.58G/5.65G [02:16<00:01, 39.7MB/s]\u001b[A\n\rnon_ema_0035000.pth: 99%|█████████▉| 5.59G/5.65G [02:16<00:01, 34.1MB/s]\u001b[A\n\rnon_ema_0035000.pth: 99%|█████████▉| 5.61G/5.65G [02:16<00:00, 37.1MB/s]\u001b[A\n\rnon_ema_0035000.pth: 100%|█████████▉| 5.62G/5.65G [02:17<00:00, 37.5MB/s]\u001b[A\n\rnon_ema_0035000.pth: 100%|█████████▉| 5.64G/5.65G [02:17<00:00, 40.8MB/s]\u001b[A\n\rnon_ema_0035000.pth: 100%|██████████| 5.65G/5.65G [02:17<00:00, 39.4MB/s]\u001b[A\rnon_ema_0035000.pth: 100%|██████████| 5.65G/5.65G [02:17<00:00, 41.0MB/s]\n\rFetching 7 files: 71%|███████▏ | 5/7 [02:21<01:11, 36.00s/it]\rFetching 7 files: 100%|██████████| 7/7 [02:21<00:00, 20.22s/it]\nTraceback (most recent call last):\n File \"/home/user/app/app.py\", line 22, in \n pipe = pipeline(task=\"video-to-video\", model=model_dir.as_posix(), model_revision=\"v1.1.0\", device=\"cuda:0\")\n File \"/home/user/.pyenv/versions/3.10.12/lib/python3.10/site-packages/modelscope/pipelines/builder.py\", line 163, in pipeline\n clear_llm_info(kwargs)\n File \"/home/user/.pyenv/versions/3.10.12/lib/python3.10/site-packages/modelscope/pipelines/builder.py\", line 227, in clear_llm_info\n from .nlp.llm_pipeline import ModelTypeHelper\n File \"/home/user/.pyenv/versions/3.10.12/lib/python3.10/site-packages/modelscope/pipelines/nlp/llm_pipeline.py\", line 9, in \n from transformers import PreTrainedTokenizer\nModuleNotFoundError: No module named 
'transformers'\n","replicas":{"requested":1},"devMode":false,"domains":[{"domain":"ali-vilab-ms-vid2vid-xl-demo.hf.space","stage":"READY"}]},"title":"ModelScope-Vid2Vid-XL","isLikedByUser":false,"trendingScore":0,"tags":["docker","region:us"],"featured":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"colorFrom":"red","colorTo":"blue","createdAt":"2023-08-25T05:03:06.000Z","emoji":"🏢","id":"ali-vilab/MS-Image2Video-demo","lastModified":"2023-08-31T01:27:35.000Z","likes":9,"pinned":false,"private":false,"sdk":"docker","repoType":"space","runtime":{"stage":"PAUSED","hardware":{"current":null,"requested":"cpu-basic"},"storage":null,"gcTimeout":86400,"replicas":{"requested":1},"devMode":false,"domains":[{"domain":"ali-vilab-ms-image2video-demo.hf.space","stage":"READY"}]},"title":"ModelScope-Image2Video","isLikedByUser":false,"trendingScore":0,"tags":["docker","region:us"],"featured":false},{"author":"ali-vilab","authorData":{"_id":"65a537edd7841991690849d4","avatarUrl":"https://cdn-avatars.huggingface.co/v1/production/uploads/623c6253389748c9f72ca287/Dxs373r3Ii6hEMfbFzqxI.png","fullname":"ali-vilab","name":"ali-vilab","type":"org","isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":687,"isUserFollowing":false},"colorFrom":"pink","colorTo":"pink","createdAt":"2023-03-19T04:30:43.000Z","emoji":"🚀","id":"ali-vilab/modelscope-text-to-video-synthesis","lastModified":"2023-03-29T08:40:22.000Z","likes":1455,"pinned":false,"private":false,"sdk":"gradio","repoType":"space","runtime":{"stage":"RUNTIME_ERROR","hardware":{"current":null,"requested":"l4x1"},"storage":null,"gcTimeout":1800,"errorMessage":"Exit code: 1. 
Reason: /user/.pyenv/versions/3.10.18/lib/python3.10/contextlib.py\", line 135, in __enter__\n return next(self.gen)\n File \"/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/accelerate/big_modeling.py\", line 74, in init_empty_weights\n with init_on_device(torch.device(\"meta\"), include_buffers=include_buffers) as f:\n/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/accelerate/big_modeling.py:74: UserWarning: Failed to initialize NumPy: _ARRAY_API not found (Triggered internally at ../torch/csrc/utils/tensor_numpy.cpp:84.)\n with init_on_device(torch.device(\"meta\"), include_buffers=include_buffers) as f:\nTraceback (most recent call last):\n File \"/home/user/app/app.py\", line 25, in \n pipe = DiffusionPipeline.from_pretrained('damo-vilab/text-to-video-ms-1.7b',\n File \"/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/diffusers/pipelines/pipeline_utils.py\", line 930, in from_pretrained\n loaded_sub_model = load_sub_model(\n File \"/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/diffusers/pipelines/pipeline_utils.py\", line 385, in load_sub_model\n loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)\n File \"/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/diffusers/schedulers/scheduling_utils.py\", line 146, in from_pretrained\n return cls.from_config(config, return_unused_kwargs=return_unused_kwargs, **kwargs)\n File \"/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/diffusers/configuration_utils.py\", line 218, in from_config\n model = cls(**init_dict)\n File \"/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/diffusers/configuration_utils.py\", line 596, in inner_init\n init(self, *args, **init_kwargs)\n File \"/home/user/.pyenv/versions/3.10.18/lib/python3.10/site-packages/diffusers/schedulers/scheduling_ddim.py\", line 176, in __init__\n self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy().astype(np.int64))\nRuntimeError: Numpy is not available\n","replicas":{"requested":1},"devMode":false,"domains":[{"domain":"ali-vilab-modelscope-text-to-video-synthesis.hf.space","stage":"READY"}]},"title":"ModelScope Text To Video Synthesis","isLikedByUser":false,"trendingScore":0,"tags":["gradio","region:us"],"featured":false}],"buckets":[],"numBuckets":0,"numDatasets":3,"numModels":12,"numSpaces":5,"lastOrgActivities":[],"acceptLanguages":["*"],"canReadRepos":false,"canReadSpaces":false,"blogPosts":[],"currentRepoPage":0,"filters":{},"paperView":false}">
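
For orientation, here is a minimal sketch of how one of the checkpoints listed above, ali-vilab/text-to-video-ms-1.7b, is typically loaded with the Hugging Face diffusers library. The prompt, scheduler choice, and generation settings are illustrative assumptions, not an official recipe, and the exact shape of the returned frames varies between diffusers versions.

    # Illustrative sketch (not an official recipe): text-to-video generation
    # with ali-vilab/text-to-video-ms-1.7b via diffusers. Assumes a CUDA GPU
    # and the torch, diffusers, and accelerate packages are installed.
    import torch
    from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
    from diffusers.utils import export_to_video

    pipe = DiffusionPipeline.from_pretrained(
        "ali-vilab/text-to-video-ms-1.7b", torch_dtype=torch.float16, variant="fp16"
    )
    pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
    pipe.enable_model_cpu_offload()  # trades speed for lower peak GPU memory

    result = pipe("A panda surfing a wave at sunset", num_inference_steps=25)
    # Recent diffusers versions return a batch of clips; take the first one.
    # (Older versions return the frame list directly, i.e. result.frames.)
    frames = result.frames[0]
    print(export_to_video(frames))  # writes an .mp4 and prints its path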

AI & ML interests

None defined yet.
