urroxyz (Urro)
Hugging Face profile: https://huggingface.co/urroxyz

Collections

TINY MODELS WITH BIG INTELLIGENCE
https://hf.co/collections/urroxyz/tiny-models-with-big-intelligence

- cerebras/GLM-4.7-Flash-REAP-23B-A3B (text-generation, ~23B params)
  Scores 30 on the Artificial Analysis Intelligence Index (Jan '26), beating GPT-OSS 20B and sitting only 3 points behind the larger 120B variant. More than HALF as intelligent as its big sibling, GLM 4.7 (Reasoning), at only 23B parameters once "unused" experts are pruned. Uniquely good for its size, and MoE: only 3B params are active per token. A minimal GGUF loading sketch follows this collection.
  https://artificialanalysis.ai/models/glm-4-7-flash
  GGUF: unsloth/GLM-4.7-Flash-REAP-23B-A3B-GGUF
  https://huggingface.co/unsloth/GLM-4.7-Flash-REAP-23B-A3B-GGUF

- janhq/Jan-v3-4B-base-instruct (text-generation, ~4.4B params)
  Beats Qwen3 4B Thinking... but it's not a thinking model, just instruct, at the same param count.

- ServiceNow-AI/Apriel-1.6-15b-Thinker (image-text-to-text)
  Doesn't usually overthink; a massive improvement over the previous 1.5 model. Outstanding intelligence for a 15B model.

- Nanbeige/Nanbeige4.1-3B (text-generation, ~3.9B params)
  Outperforms Qwen 30B models at almost 1/10 the size.
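The GGUF build linked in the first item runs with any llama.cpp frontend. Below is a minimal sketch using llama-cpp-python; the Q4_K_M filename pattern and the context length are assumptions, not details taken from the note, and architecture support depends on your llama.cpp version.

```python
# Minimal sketch, not an official recipe: running the GGUF build locally with
# llama-cpp-python. Assumptions: the repo ships a Q4_K_M quant matching this
# filename pattern, and your llama.cpp build supports the GLM-4.7-Flash architecture.
from llama_cpp import Llama

llm = Llama.from_pretrained(
    repo_id="unsloth/GLM-4.7-Flash-REAP-23B-A3B-GGUF",
    filename="*Q4_K_M.gguf",  # hypothetical quant choice; substitute any file in the repo
    n_ctx=8192,               # context length; lower it if you run out of memory
)

response = llm.create_chat_completion(
    messages=[{"role": "user", "content": "In one sentence, what is expert pruning in a MoE model?"}],
    max_tokens=128,
)
print(response["choices"][0]["message"]["content"])
```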
ETHICALLY-DECENT & LEGALLY-ADJACENT
https://hf.co/collections/urroxyz/ethically-decent-and-legally-adjacent
Depending on your definitions, these models may not be strictly "ethical" or "legal", yet they are 100% more ethical and legal than GPT or Claude. (A quick license-check sketch follows this list.)

- ibm-granite/granite-4.0-h-small (text-generation)
  Granite 4.0 is open-sourced under the Apache 2.0 license and is the first model family to receive ISO 42001 certification for AI safety and transparency.
  Please take a look at the other 4.0 models available:
  https://huggingface.co/collections/ibm-granite/granite-40-nano-language-models
  https://huggingface.co/collections/ibm-granite/granite-40-language-models
  Reasoning is not yet available but expected in the near future.

- ibm-granite/granite-3.3-8b-instruct (text-generation, ~8.2B params)
  REASONING MODEL (namely, for coding tasks)
  IMPORTANT NOTE: It is possible that some of the prompt data (user input) on which this model was trained was synthesized with proprietary models; however, as inferred from the transparency report, all model output was either (1) taken from permissive datasets (many of which were filtered meta-datasets) or (2) synthesized with internal permissive models.
  Thank you to IBM for making one of the first quality instruct models with high ethics and legality.

- ibm-granite/granite-3.0-8b-instruct (text-generation)
  INSTRUCT MODEL (namely, for coding tasks)
  Same IMPORTANT NOTE as above regarding prompt data and permissively sourced model output.
  Thank you to IBM for making one of the first quality instruct models with high ethics and legality.

- alea-institute/kl3m-003-1.7b (text-generation, ~1.7B params)
  BASE MODEL
  There are many other checkpoints: https://huggingface.co/alea-institute/models
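Because licensing is the whole point of this collection, it can help to confirm what each repository advertises on the Hub. This is a minimal sketch using huggingface_hub; it assumes each repo carries a standard license:<spdx-id> tag in its metadata, which is common but not guaranteed.

```python
# Minimal sketch: reading the advertised license tag of each model in this collection
# from the Hub metadata. Assumption: the repos expose a "license:<spdx-id>" tag; if a
# repo omits it, the script simply reports that nothing was found.
from huggingface_hub import HfApi

api = HfApi()
repos = [
    "ibm-granite/granite-4.0-h-small",
    "ibm-granite/granite-3.3-8b-instruct",
    "ibm-granite/granite-3.0-8b-instruct",
    "alea-institute/kl3m-003-1.7b",
]
for repo_id in repos:
    info = api.model_info(repo_id)  # public repos, no token needed
    license_tags = [t for t in (info.tags or []) if t.startswith("license:")]
    print(f"{repo_id}: {', '.join(license_tags) or 'no license tag found'}")
```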
HUMAN-WRITTEN & LEGALLY-SOURCED*
https://hf.co/collections/urroxyz/human-written-and-legally-sourced
Datasets written by humans and/or reverse-engineered from text with deterministic algorithms. No illegal scraping or unethical synthesis. *...mostly.

- BramVanroy/CommonCrawl-CreativeCommons (text)
  Amazing dataset!!! 7 Common Crawl releases filtered and annotated for CC licensing (only some 100M unique rows). A streaming sketch follows this list.
  Please view similar projects (smaller/less granular):
  https://huggingface.co/datasets/common-pile/dolma-cccc (259M rows, eng only)
  https://data.commoncrawl.org/contrib/c4corpus/CC-MAIN-2016-07/index.html (dated multilingual)

- PleIAs/common_corpus (text)
  pretraining • multilingual • cultural heritage • volunteer
  ★★★★★ (5/5)
  Largest public domain corpus; legally safe; traceable provenance; diverse sources.
  OpenCulture, Gov Docs, Wikisource, Gutenberg
  "The largest open AI training data set consisting entirely of public-domain texts."
  CC0 1.0 Universal

- common-pile/comma_v0.1_training_dataset (text)
  pretraining • multilingual • public domain • internal
  ★★★★★ (5/5)
  Massive scale; strictly permissive licenses; addresses IP concerns with clear provenance.
  GitHub, ArXiv, StackExchange, Public Domain
  "Exclusively collecting content in the public domain or under permissive open licenses."
  Apache License 2.0

- crumb/openstax-text (text)
  "76 OpenStax textbooks."
  Creative Commons Attribution 4.0
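These corpora are far too large for a casual download, so a quick look is easiest in streaming mode. A minimal sketch with the datasets library; the split name and the "text" column are assumptions about the schema, so check the dataset card before relying on them.

```python
# Minimal sketch: peeking at a few records of one of the corpora above without
# downloading it. Assumptions: the default config exposes a "train" split and a
# "text" column; check the dataset card for the real configuration names.
from datasets import load_dataset

stream = load_dataset(
    "BramVanroy/CommonCrawl-CreativeCommons",
    split="train",
    streaming=True,  # iterate lazily instead of materializing hundreds of millions of rows
)

for i, record in enumerate(stream):
    print(record.get("text", "")[:200].replace("\n", " "))
    if i == 2:       # look at three records and stop
        break
```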
ATTENTIVE ASR MODELS FOR ONNX
https://hf.co/collections/urroxyz/attentive-asr-models-for-onnx
ONNX conversions of ASR models with attention outputs enabled, especially useful for word-level timestamp extraction (see the sketch after this list).

- urroxyz/whisper-medium_timestamped (automatic-speech-recognition)
- urroxyz/whisper-medium.en_timestamped (automatic-speech-recognition)
- urroxyz/Voxtral-Mini-3B-2507_timestamped (audio-text-to-text)
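For context on why attention outputs matter here: Whisper derives word-level timestamps by aligning decoder cross-attention weights with the audio, which is exactly what these exports keep available. The sketch below illustrates the same feature through the transformers pipeline on the upstream PyTorch checkpoint; the ONNX repos in this collection are aimed at onnxruntime / transformers.js consumers, and that loading path is not shown. The audio filename is a placeholder.

```python
# Minimal sketch: word-level timestamps via Whisper's cross-attention alignment,
# shown with the upstream PyTorch checkpoint for brevity. Assumption: "sample.wav"
# is a placeholder for your own audio file; the ONNX repos above serve the same
# purpose for onnxruntime / transformers.js pipelines.
from transformers import pipeline

asr = pipeline(
    "automatic-speech-recognition",
    model="openai/whisper-medium",
    chunk_length_s=30,  # chunked long-form decoding
)

result = asr("sample.wav", return_timestamps="word")
for chunk in result["chunks"]:
    start, end = chunk["timestamp"]
    print(start, end, chunk["text"])
```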
Models
The three _timestamped ONNX conversions listed in the ASR collection above (whisper-medium, whisper-medium.en, Voxtral-Mini-3B-2507).

About
i like research on empowering small LMs to do better
i DISLIKE video & image generation (esp. ai "art")
Homepage: https://urro.xyz/
GitHub: urroxyz
Organization: URRO (urrodotxyz), admin
3 followers, following 10 orgs, 119 upvotes, 3 models, 0 datasets, 0 spaces