@@ -5,6 +5,7 @@ use openvm_instructions::exe::SparseMemoryImage;
 use openvm_stark_backend::p3_field::PrimeField32;
 use serde::{Deserialize, Serialize};
 
+use super::online::GuestMemory;
 use crate::arch::MemoryConfig;
 
 /// (address_space, pointer)
@@ -72,6 +73,7 @@ impl<const PAGE_SIZE: usize> PagedVec<PAGE_SIZE> {
                 ptr::copy_nonoverlapping(page.as_ptr().add(offset), dst, len);
                 ptr::copy_nonoverlapping(new, page.as_mut_ptr().add(offset), len);
             } else {
+                assert_eq!(start_page + 1, end_page);
                 let offset = start % PAGE_SIZE;
                 let first_part = PAGE_SIZE - offset;
                 {
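Reviewer note: the added `assert_eq!(start_page + 1, end_page)` makes the implicit invariant explicit — once `start_page != end_page`, the block must land on exactly two adjacent pages, which is guaranteed as long as `size_of::<BLOCK>() <= PAGE_SIZE`. A minimal standalone check of that arithmetic (the constant and helper below are illustrative, not part of this crate):

```rust
/// Illustrative only: shows why a block of at most PAGE_SIZE bytes can touch
/// at most two consecutive pages, which is what the new assert encodes.
const PAGE_SIZE: usize = 4096;

fn page_span(start: usize, len: usize) -> (usize, usize) {
    let start_page = start / PAGE_SIZE;
    let end_page = (start + len - 1) / PAGE_SIZE; // page of the last byte written
    (start_page, end_page)
}

fn main() {
    for &start in &[0, 1, 4095, 4096, 8191] {
        for &len in &[1, 8, 4096] {
            let (s, e) = page_span(start, len);
            // With len <= PAGE_SIZE the write spans one page or two adjacent pages.
            assert!(e == s || e == s + 1);
        }
    }
    println!("a block of <= PAGE_SIZE bytes never spans more than two pages");
}
```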
@@ -120,11 +122,41 @@ impl<const PAGE_SIZE: usize> PagedVec<PAGE_SIZE> {
         unsafe { result.assume_init() }
     }
 
+    /// # Panics
+    /// If `start..start + size_of<BLOCK>()` is out of bounds.
+    #[inline(always)]
+    pub fn set<BLOCK: Copy>(&mut self, start: usize, values: &BLOCK) {
+        let len = size_of::<BLOCK>();
+        let start_page = start / PAGE_SIZE;
+        let end_page = (start + len - 1) / PAGE_SIZE;
+        let src = values as *const _ as *const u8;
+        unsafe {
+            if start_page == end_page {
+                let offset = start % PAGE_SIZE;
+                let page = self.pages[start_page].get_or_insert_with(|| vec![0u8; PAGE_SIZE]);
+                ptr::copy_nonoverlapping(src, page.as_mut_ptr().add(offset), len);
+            } else {
+                assert_eq!(start_page + 1, end_page);
+                let offset = start % PAGE_SIZE;
+                let first_part = PAGE_SIZE - offset;
+                {
+                    let page = self.pages[start_page].get_or_insert_with(|| vec![0u8; PAGE_SIZE]);
+                    ptr::copy_nonoverlapping(src, page.as_mut_ptr().add(offset), first_part);
+                }
+                let second_part = len - first_part;
+                {
+                    let page = self.pages[end_page].get_or_insert_with(|| vec![0u8; PAGE_SIZE]);
+                    ptr::copy_nonoverlapping(src.add(first_part), page.as_mut_ptr(), second_part);
+                }
+            }
+        }
+    }
+
     /// memcpy of new `values` into pages, memcpy of old existing values into new returned value.
     /// # Panics
     /// If `from..from + size_of<BLOCK>()` is out of bounds.
     #[inline(always)]
-    pub fn set<BLOCK: Copy>(&mut self, from: usize, values: &BLOCK) -> BLOCK {
+    pub fn replace<BLOCK: Copy>(&mut self, from: usize, values: &BLOCK) -> BLOCK {
         // Create an uninitialized array for old values.
         let mut result: MaybeUninit<BLOCK> = MaybeUninit::uninit();
         self.set_range_generic(
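Reviewer note: the new `set` is a write-only path — it allocates zero-filled pages on demand and splits the memcpy when the block straddles a page boundary, without reading back the old contents the way `replace` does. Below is a dependency-free, safe-Rust sketch of the same control flow over a toy page table (not the actual `PagedVec` type), under the same `len <= PAGE_SIZE` assumption:

```rust
const PAGE_SIZE: usize = 16; // tiny page size so the split case is easy to trigger

/// Toy page table: pages are allocated lazily and zero-filled.
struct ToyPages {
    pages: Vec<Option<Vec<u8>>>,
}

impl ToyPages {
    fn new(num_pages: usize) -> Self {
        Self { pages: vec![None; num_pages] }
    }

    /// Write-only counterpart of the new `PagedVec::set`: copies `values`
    /// into the backing pages without returning the previous contents.
    fn set(&mut self, start: usize, values: &[u8]) {
        assert!(!values.is_empty() && values.len() <= PAGE_SIZE);
        let start_page = start / PAGE_SIZE;
        let end_page = (start + values.len() - 1) / PAGE_SIZE;
        let offset = start % PAGE_SIZE;
        if start_page == end_page {
            // Block fits inside one page.
            let page = self.pages[start_page].get_or_insert_with(|| vec![0u8; PAGE_SIZE]);
            page[offset..offset + values.len()].copy_from_slice(values);
        } else {
            // Block straddles a page boundary: split the copy in two.
            let first_part = PAGE_SIZE - offset;
            let page = self.pages[start_page].get_or_insert_with(|| vec![0u8; PAGE_SIZE]);
            page[offset..].copy_from_slice(&values[..first_part]);
            let page = self.pages[end_page].get_or_insert_with(|| vec![0u8; PAGE_SIZE]);
            page[..values.len() - first_part].copy_from_slice(&values[first_part..]);
        }
    }
}

fn main() {
    let mut mem = ToyPages::new(4);
    mem.set(14, &[1, 2, 3, 4]); // straddles the boundary between page 0 and page 1
    assert_eq!(&mem.pages[0].as_ref().unwrap()[14..], &[1u8, 2]);
    assert_eq!(&mem.pages[1].as_ref().unwrap()[..2], &[3u8, 4]);
}
```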
@@ -278,7 +310,7 @@ impl<const PAGE_SIZE: usize> AddressMap<PAGE_SIZE> {
         );
         self.paged_vecs
             .get_unchecked_mut((addr_space - self.as_offset) as usize)
-            .set((ptr as usize) * size_of::<T>(), &data)
+            .replace((ptr as usize) * size_of::<T>(), &data)
     }
     pub fn is_empty(&self) -> bool {
         self.paged_vecs.iter().all(|page| page.is_empty())
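Reviewer note: this call keeps its position as the tail expression, so switching it to `replace` preserves the old behavior of handing the previously stored block back to the caller; only writes that discard the old value should move to the new `set`. A tiny illustration of the two semantics on plain slices (illustrative names, not openvm's API):

```rust
/// Illustrative only: a `set`-style write discards the old bytes,
/// a `replace`-style write hands them back to the caller.
fn set_bytes(buf: &mut [u8], start: usize, new: &[u8]) {
    buf[start..start + new.len()].copy_from_slice(new);
}

fn replace_bytes(buf: &mut [u8], start: usize, new: &[u8]) -> Vec<u8> {
    let old = buf[start..start + new.len()].to_vec(); // snapshot previous contents first
    buf[start..start + new.len()].copy_from_slice(new);
    old
}

fn main() {
    let mut mem = vec![7u8; 8];
    set_bytes(&mut mem, 0, &[1, 2]);               // old value is lost
    let old = replace_bytes(&mut mem, 4, &[3, 4]); // old value is returned
    assert_eq!(old, vec![7u8, 7]);
    assert_eq!(mem, vec![1u8, 2, 7, 7, 3, 4, 7, 7]);
}
```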
@@ -302,11 +334,12 @@ impl<const PAGE_SIZE: usize> AddressMap<PAGE_SIZE> {
     }
 }
 
-impl<const PAGE_SIZE: usize> AddressMap<PAGE_SIZE> {
-    /// # Safety
-    /// - `T` **must** be the correct type for a single memory cell for `addr_space`
-    /// - Assumes `addr_space` is within the configured memory and not out of bounds
-    pub unsafe fn get_range<T: Copy, const N: usize>(&self, (addr_space, ptr): Address) -> [T; N] {
+impl<const PAGE_SIZE: usize> GuestMemory for AddressMap<PAGE_SIZE> {
+    unsafe fn read<T: Copy, const BLOCK_SIZE: usize>(
+        &mut self,
+        addr_space: u32,
+        ptr: u32,
+    ) -> [T; BLOCK_SIZE] {
         debug_assert_eq!(
             size_of::<T>(),
             self.cell_size[(addr_space - self.as_offset) as usize]
@@ -316,22 +349,20 @@ impl<const PAGE_SIZE: usize> AddressMap<PAGE_SIZE> {
             .get((ptr as usize) * size_of::<T>())
     }
 
-    /// # Safety
-    /// - `T` **must** be the correct type for a single memory cell for `addr_space`
-    /// - Assumes `addr_space` is within the configured memory and not out of bounds
-    pub unsafe fn set_range<T: Copy, const N: usize>(
+    unsafe fn write<T: Copy, const BLOCK_SIZE: usize>(
         &mut self,
-        (addr_space, ptr): Address,
-        values: &[T; N],
-    ) -> [T; N] {
+        addr_space: u32,
+        ptr: u32,
+        values: &[T; BLOCK_SIZE],
+    ) {
         debug_assert_eq!(
             size_of::<T>(),
             self.cell_size[(addr_space - self.as_offset) as usize],
             "addr_space={addr_space}"
         );
         self.paged_vecs
             .get_unchecked_mut((addr_space - self.as_offset) as usize)
-            .set((ptr as usize) * size_of::<T>(), values)
+            .set((ptr as usize) * size_of::<T>(), values);
     }
 }
 
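Reviewer note: the `GuestMemory` trait itself lives in `super::online` and is not shown in this diff; the sketch below is only the shape these impl signatures imply (the real trait may carry more methods, bounds, or safety docs). Also worth flagging: `read` takes `&mut self` where the old `get_range` took `&self`, so read-only call sites now need mutable access to the `AddressMap`.

```rust
/// Inferred from the impl above, not copied from `super::online`.
pub trait GuestMemory {
    /// # Safety
    /// `T` must be the correct cell type for `addr_space`, and the access must
    /// stay in bounds (the contract the old `get_range`/`set_range` documented).
    unsafe fn read<T: Copy, const BLOCK_SIZE: usize>(
        &mut self,
        addr_space: u32,
        ptr: u32,
    ) -> [T; BLOCK_SIZE];

    /// # Safety
    /// Same contract as `read`.
    unsafe fn write<T: Copy, const BLOCK_SIZE: usize>(
        &mut self,
        addr_space: u32,
        ptr: u32,
        values: &[T; BLOCK_SIZE],
    );
}
```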