|
2 | 2 | * |
3 | 3 | * Copyright (c) 2023 Google LLC |
4 | 4 | * Written by Manfred SCHLAEGL, 2022 |
5 | | - * Dragoș Tiselice <dtiselice@google.com>, May 2023. |
6 | | - * Filip Wasil <f.wasil@samsung.com>, March 2025. |
7 | | - * Liang Junzhao <junzhao.liang@spacemit.com>, November 2025. |
| 5 | + * Revised by: |
| 6 | + * - Dragoș Tiselice <dtiselice@google.com>, May 2023 |
| 7 | + * - Filip Wasil <f.wasil@samsung.com>, March 2025 |
| 8 | + * - Liang Junzhao <junzhao.liang@spacemit.com>, November 2025 |
| 9 | + * - Alexander Smorkalov <alexander.smorkalov@opencv.ai>, December 2025 |
8 | 10 | * |
9 | 11 | * This code is released under the libpng license. |
10 | 12 | * For conditions of distribution and use, see the disclaimer |
@@ -175,47 +177,6 @@ png_read_filter_row_avg4_rvv(png_row_infop row_info, png_bytep row, |
175 | 177 | PNG_UNUSED(prev_row) |
176 | 178 | } |
177 | 179 |
|
178 | | -#define MIN_CHUNK_LEN 256
179 | | -#define MAX_CHUNK_LEN 2048
180 | | -
181 | | -static inline vuint8m1_t
182 | | -prefix_sum(vuint8m1_t chunk, unsigned char *carry, size_t vl,
183 | | -   size_t max_chunk_len)
184 | | -{
185 | | -   size_t r;
186 | | -
187 | | -   for (r = 1; r < MIN_CHUNK_LEN; r <<= 1)
188 | | -   {
189 | | -      vbool8_t shift_mask = __riscv_vmsgeu_vx_u8m1_b8(__riscv_vid_v_u8m1(vl), r, vl);
190 | | -      chunk = __riscv_vadd_vv_u8m1_mu(shift_mask, chunk, chunk, __riscv_vslideup_vx_u8m1(__riscv_vundefined_u8m1(), chunk, r, vl), vl);
191 | | -   }
192 | | -
193 | | -   for (r = MIN_CHUNK_LEN; r < MAX_CHUNK_LEN && r < max_chunk_len; r <<= 1)
194 | | -   {
195 | | -      vbool8_t shift_mask = __riscv_vmsgeu_vx_u8m1_b8(__riscv_vid_v_u8m1(vl), r, vl);
196 | | -      chunk = __riscv_vadd_vv_u8m1_mu(shift_mask, chunk, chunk, __riscv_vslideup_vx_u8m1(__riscv_vundefined_u8m1(), chunk, r, vl), vl);
197 | | -   }
198 | | -
199 | | -   chunk = __riscv_vadd_vx_u8m1(chunk, *carry, vl);
200 | | -   *carry = __riscv_vmv_x_s_u8m1_u8(__riscv_vslidedown_vx_u8m1(chunk, vl - 1, vl));
201 | | -
202 | | -   return chunk;
203 | | -}
204 | | -
205 | | -static inline vint16m1_t
206 | | -abs_diff(vuint16m1_t a, vuint16m1_t b, size_t vl)
207 | | -{
208 | | -   vint16m1_t diff = __riscv_vreinterpret_v_u16m1_i16m1(__riscv_vsub_vv_u16m1(a, b, vl));
209 | | -   vbool16_t mask = __riscv_vmslt_vx_i16m1_b16(diff, 0, vl);
210 | | -   return __riscv_vrsub_vx_i16m1_m(mask, diff, 0, vl);
211 | | -}
212 | | -
213 | | -static inline vint16m1_t
214 | | -abs_sum(vint16m1_t a, vint16m1_t b, size_t vl)
215 | | -{
216 | | -   return __riscv_vadd_vv_i16m1(a, b, vl);
217 | | -}
218 | | -
219 | 180 | static inline void
220 | 181 | png_read_filter_row_paeth_rvv(size_t len, size_t bpp, unsigned char *row,
221 | 182 |    const unsigned char *prev)
|