{"id":1654,"date":"2025-11-24T07:33:59","date_gmt":"2025-11-24T07:33:59","guid":{"rendered":"https:\/\/ccds.ai\/?p=1654"},"modified":"2025-12-09T07:37:46","modified_gmt":"2025-12-09T07:37:46","slug":"1654","status":"publish","type":"post","link":"https:\/\/ccds.ai\/?p=1654","title":{"rendered":"One of our research papers has been accepted at WACV 2026"},"content":{"rendered":"<div id='av_section_1'  class='avia-section av-av_section-e72984d7d38aa3d67d084c0697520518 main_color avia-section-default avia-no-border-styling  avia-builder-el-0  avia-builder-el-no-sibling  avia-bg-style-scroll container_wrap fullsize'  ><div class='container av-section-cont-open' ><main  role=\"main\" itemprop=\"mainContentOfPage\"  class='template-page content  av-content-full alpha units'><div class='post-entry post-entry-type-page post-entry-1654'><div class='entry-content-wrapper clearfix'>\n<div class='flex_column av-av_one_full-a961ec8c42a44a397321d1a2224da3ca av_one_full  avia-builder-el-1  avia-builder-el-no-sibling  first flex_column_div '     ><section  class='av_textblock_section av-miy9llxr-e370b20cb2d930d1b12aa9edc3be75b2'  itemscope=\"itemscope\" itemtype=\"https:\/\/schema.org\/BlogPosting\" itemprop=\"blogPost\" ><div class='avia_textblock'  itemprop=\"text\" ><div class=\"xdj266r x14z9mp xat24cr x1lziwak x1vvkbs x126k92a\">\n<div dir=\"auto\">We are thrilled to announce that our paper \u201cR-MMA: Enhancing Vision-Language Models with Recurrent Adapters for Few-Shot and Cross-Domain Generalization\u201d has been accepted at WACV 2026, one of the premier conferences in Computer Vision (CORE Rank A). Congratulations to Md Fahim and Mir Sazzat Hossain (Research Assistants, CCDS). 
<span class=\"html-span xexx8yu xyri2b x18d9i69 x1c1uobl x1hl2dhg x16tdsg8 x1vvkbs x3nfvp2 x1j61x8r x1fcty0u xdj266r xat24cr xm2jcoa x1mpyi22 xxymvpz xlup9mm x1kky2od\"><img decoding=\"async\" class=\"xz74otr x15mokao x1ga7v0g x16uus16 xbiv7yw\" src=\"https:\/\/static.xx.fbcdn.net\/images\/emoji.php\/v9\/t8c\/1\/16\/1f389.png\" alt=\"\ud83c\udf89\" width=\"16\" height=\"16\" \/><\/span><span class=\"html-span xexx8yu xyri2b x18d9i69 x1c1uobl x1hl2dhg x16tdsg8 x1vvkbs x3nfvp2 x1j61x8r x1fcty0u xdj266r xat24cr xm2jcoa x1mpyi22 xxymvpz xlup9mm x1kky2od\"><img decoding=\"async\" class=\"xz74otr x15mokao x1ga7v0g x16uus16 xbiv7yw\" src=\"https:\/\/static.xx.fbcdn.net\/images\/emoji.php\/v9\/t50\/1\/16\/1f525.png\" alt=\"\ud83d\udd25\" width=\"16\" height=\"16\" \/><\/span><\/div>\n<\/div>\n<div class=\"x14z9mp xat24cr x1lziwak x1vvkbs xtlvy1s x126k92a\">\n<div dir=\"auto\">This work introduces R-MMA, a lightweight and highly parameter-efficient adapter designed to enhance few-shot and cross-domain generalization in Vision-Language Models, such as CLIP. 
R-MMA aligns and refines frozen encoder features through a unified attention-driven representation, achieving state-of-the-art performance across base-to-novel, cross-dataset, and domain generalization benchmarks.<\/div>\n<\/div>\n<div class=\"x14z9mp xat24cr x1lziwak x1vvkbs xtlvy1s x126k92a\">\n<div dir=\"auto\">Congratulations to all co-authors, collaborators, and supervisors <span class=\"html-span xexx8yu xyri2b x18d9i69 x1c1uobl x1hl2dhg x16tdsg8 x1vvkbs x3nfvp2 x1j61x8r x1fcty0u xdj266r xat24cr xm2jcoa x1mpyi22 xxymvpz xlup9mm x1kky2od\"><img loading=\"lazy\" decoding=\"async\" class=\"xz74otr x15mokao x1ga7v0g x16uus16 xbiv7yw\" src=\"https:\/\/static.xx.fbcdn.net\/images\/emoji.php\/v9\/tb4\/1\/16\/1f38a.png\" alt=\"\ud83c\udf8a\" width=\"16\" height=\"16\" \/><\/span><span class=\"html-span xexx8yu xyri2b x18d9i69 x1c1uobl x1hl2dhg x16tdsg8 x1vvkbs x3nfvp2 x1j61x8r x1fcty0u xdj266r xat24cr xm2jcoa x1mpyi22 xxymvpz xlup9mm x1kky2od\"><img loading=\"lazy\" decoding=\"async\" class=\"xz74otr x15mokao x1ga7v0g x16uus16 xbiv7yw\" src=\"https:\/\/static.xx.fbcdn.net\/images\/emoji.php\/v9\/tb4\/1\/16\/1f38a.png\" alt=\"\ud83c\udf8a\" width=\"16\" height=\"16\" \/><\/span><\/div>\n<\/div>\n<\/div><\/section><br \/>\n\n<style type=\"text\/css\" data-created_by=\"avia_inline_auto\" id=\"style-css-av-miy9lz9z-beaef28f50221f07a170db085c3dec56\">\n.avia-image-container.av-miy9lz9z-beaef28f50221f07a170db085c3dec56 img.avia_image{\nbox-shadow:none;\n}\n.avia-image-container.av-miy9lz9z-beaef28f50221f07a170db085c3dec56 .av-image-caption-overlay-center{\ncolor:#ffffff;\n}\n<\/style>\n<div  class='avia-image-container av-miy9lz9z-beaef28f50221f07a170db085c3dec56 av-styling- avia-align-center  avia-builder-el-3  el_after_av_textblock  avia-builder-el-last '   itemprop=\"image\" itemscope=\"itemscope\" itemtype=\"https:\/\/schema.org\/ImageObject\" ><div class=\"avia-image-container-inner\"><div class=\"avia-image-overlay-wrap\"><img decoding=\"async\" class='wp-image-1655 
avia-img-lazy-loading-not-1655 avia_image ' src=\"https:\/\/ccds.ai\/wp-content\/uploads\/2025\/12\/588472913_862637909613335_5488227795735314575_n-300x120.jpg\" alt='' title='588472913_862637909613335_5488227795735314575_n'  height=\"120\" width=\"300\"  itemprop=\"thumbnailUrl\" srcset=\"https:\/\/ccds.ai\/wp-content\/uploads\/2025\/12\/588472913_862637909613335_5488227795735314575_n-300x120.jpg 300w, https:\/\/ccds.ai\/wp-content\/uploads\/2025\/12\/588472913_862637909613335_5488227795735314575_n-1030x412.jpg 1030w, https:\/\/ccds.ai\/wp-content\/uploads\/2025\/12\/588472913_862637909613335_5488227795735314575_n-768x307.jpg 768w, https:\/\/ccds.ai\/wp-content\/uploads\/2025\/12\/588472913_862637909613335_5488227795735314575_n-705x282.jpg 705w, https:\/\/ccds.ai\/wp-content\/uploads\/2025\/12\/588472913_862637909613335_5488227795735314575_n.jpg 1382w\" sizes=\"(max-width: 300px) 100vw, 300px\" \/><\/div><\/div><\/div><\/p><\/div>\n","protected":false},"excerpt":{"rendered":"","protected":false},"author":2,"featured_media":1655,"comment_status":"closed","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"_acf_changed":false,"_jetpack_memberships_contains_paid_content":false,"footnotes":""},"categories":[39],"tags":[],"class_list":["post-1654","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-publications"],"acf":[],"jetpack_featured_media_url":"https:\/\/ccds.ai\/wp-content\/uploads\/2025\/12\/588472913_862637909613335_5488227795735314575_n.jpg","jetpack_sharing_enabled":true,"_links":{"self":[{"href":"https:\/\/ccds.ai\/index.php?rest_route=\/wp\/v2\/posts\/1654","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/ccds.ai\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/ccds.ai\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/ccds.ai\/index.php?rest_route=\/wp\/v2\/users\/2"}],"replies":[{"embeddable":true,"href":"h
ttps:\/\/ccds.ai\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=1654"}],"version-history":[{"count":3,"href":"https:\/\/ccds.ai\/index.php?rest_route=\/wp\/v2\/posts\/1654\/revisions"}],"predecessor-version":[{"id":1657,"href":"https:\/\/ccds.ai\/index.php?rest_route=\/wp\/v2\/posts\/1654\/revisions\/1657"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/ccds.ai\/index.php?rest_route=\/wp\/v2\/media\/1655"}],"wp:attachment":[{"href":"https:\/\/ccds.ai\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=1654"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/ccds.ai\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=1654"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/ccds.ai\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=1654"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}