From 1a1bac38835a20c6b2609d8182ba2067b99f249f Mon Sep 17 00:00:00 2001
From: Bert Belder <bertbelder@gmail.com>
Date: Tue, 21 Jan 2020 16:03:42 +0100
Subject: [PATCH] Make BackingStore APIs more consistent with C++ (#234)

---
 src/array_buffer.rs        | 100 ++++++++++++++++++++++++-------------
 src/binding.cc             |  60 +++++++++++++---------
 src/shared_array_buffer.rs |  85 +++++++++++++++++++++----------
 src/support.rs             |  36 +++++++++++--
 tests/test_api.rs          |  51 +++++++++++--------
 5 files changed, 224 insertions(+), 108 deletions(-)

diff --git a/src/array_buffer.rs b/src/array_buffer.rs
index 98d4ab0c..d6919e49 100644
--- a/src/array_buffer.rs
+++ b/src/array_buffer.rs
@@ -1,3 +1,11 @@
+// Copyright 2019-2020 the Deno authors. All rights reserved. MIT license.
+
+use std::ffi::c_void;
+use std::ops::Deref;
+use std::ops::DerefMut;
+use std::ptr::null_mut;
+use std::slice;
+
 use crate::support::long;
 use crate::support::Delete;
 use crate::support::Opaque;
@@ -13,11 +21,11 @@ use crate::ToLocal;
 extern "C" {
   fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
   fn v8__ArrayBuffer__Allocator__DELETE(this: &'static mut Allocator);
-  fn v8__ArrayBuffer__New__byte_length(
+  fn v8__ArrayBuffer__New__with_byte_length(
     isolate: *mut Isolate,
     byte_length: usize,
   ) -> *mut ArrayBuffer;
-  fn v8__ArrayBuffer__New__backing_store(
+  fn v8__ArrayBuffer__New__with_backing_store(
     isolate: *mut Isolate,
     backing_store: *mut SharedRef<BackingStore>,
   ) -> *mut ArrayBuffer;
@@ -25,20 +33,25 @@ extern "C" {
   fn v8__ArrayBuffer__GetBackingStore(
     self_: *const ArrayBuffer,
   ) -> SharedRef<BackingStore>;
-  fn v8__ArrayBuffer__NewBackingStore(
+  fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
     isolate: *mut Isolate,
     byte_length: usize,
   ) -> *mut BackingStore;
-  fn v8__ArrayBuffer__NewBackingStore_FromRaw(
-    data: *mut std::ffi::c_void,
+  fn v8__ArrayBuffer__NewBackingStore__with_data(
+    data: *mut c_void,
     byte_length: usize,
     deleter: BackingStoreDeleterCallback,
-  ) -> SharedRef<BackingStore>;
+    deleter_data: *mut c_void,
+  ) -> *mut BackingStore;
 
-  fn v8__BackingStore__Data(self_: &mut BackingStore) -> *mut std::ffi::c_void;
-  fn v8__BackingStore__ByteLength(self_: &BackingStore) -> usize;
-  fn v8__BackingStore__IsShared(self_: &BackingStore) -> bool;
-  fn v8__BackingStore__DELETE(self_: &mut BackingStore);
+  fn v8__BackingStore__Data(this: *mut BackingStore) -> *mut c_void;
+  fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
+  fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
+  fn v8__BackingStore__DELETE(this: &mut BackingStore);
+
+  fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
+    unique: UniqueRef<BackingStore>,
+  ) -> SharedRef<BackingStore>;
   fn std__shared_ptr__v8__BackingStore__get(
     ptr: *const SharedRef<BackingStore>,
   ) -> *mut BackingStore;
@@ -92,15 +105,15 @@ impl Delete for Allocator {
 }
 
 pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
-  data: *mut std::ffi::c_void,
+  data: *mut c_void,
   byte_length: usize,
-  deleter_data: *mut std::ffi::c_void,
+  deleter_data: *mut c_void,
 );
 
 pub unsafe extern "C" fn backing_store_deleter_callback(
-  data: *mut std::ffi::c_void,
+  data: *mut c_void,
   _byte_length: usize,
-  _deleter_data: *mut std::ffi::c_void,
+  _deleter_data: *mut c_void,
 ) {
   let b = Box::from_raw(data);
   drop(b)
@@ -123,21 +136,11 @@ pub struct BackingStore([usize; 6]);
 unsafe impl Send for BackingStore {}
 
 impl BackingStore {
-  /// Returns a rust u8 slice with a lifetime equal to the lifetime of the BackingStore.
-  pub fn data_bytes<'a>(&'a mut self) -> &'a mut [u8] {
-    unsafe {
-      std::slice::from_raw_parts_mut::<'a, u8>(
-        v8__BackingStore__Data(self) as *mut u8,
-        self.byte_length(),
-      )
-    }
-  }
-
   /// Return a pointer to the beginning of the memory block for this backing
   /// store. The pointer is only valid as long as this backing store object
   /// lives.
-  pub fn data(&mut self) -> &mut std::ffi::c_void {
-    unsafe { &mut *v8__BackingStore__Data(self) }
+  pub fn data(&self) -> *mut c_void {
+    unsafe { v8__BackingStore__Data(self as *const _ as *mut Self) }
   }
 
   /// The length (in bytes) of this backing store.
@@ -152,6 +155,24 @@ impl BackingStore {
   }
 }
 
+impl Deref for BackingStore {
+  type Target = [u8];
+
+  /// Returns a [u8] slice referencing the data in the backing store.
+  fn deref(&self) -> &[u8] {
+    unsafe { slice::from_raw_parts(self.data() as *mut u8, self.byte_length()) }
+  }
+}
+
+impl DerefMut for BackingStore {
+  /// Returns a mutable [u8] slice referencing the data in the backing store.
+  fn deref_mut(&mut self) -> &mut [u8] {
+    unsafe {
+      slice::from_raw_parts_mut(self.data() as *mut u8, self.byte_length())
+    }
+  }
+}
+
 impl Delete for BackingStore {
   fn delete(&mut self) {
     unsafe { v8__BackingStore__DELETE(self) };
@@ -159,6 +180,11 @@ impl Delete for BackingStore {
 }
 
 impl Shared for BackingStore {
+  fn from_unique(unique: UniqueRef<BackingStore>) -> SharedRef<BackingStore> {
+    unsafe {
+      std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique)
+    }
+  }
   fn deref(ptr: *const SharedRef<BackingStore>) -> *mut Self {
     unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
   }
@@ -181,17 +207,17 @@ impl ArrayBuffer {
   ) -> Local<'sc, ArrayBuffer> {
     let isolate = scope.isolate();
     let ptr =
-      unsafe { v8__ArrayBuffer__New__byte_length(isolate, byte_length) };
+      unsafe { v8__ArrayBuffer__New__with_byte_length(isolate, byte_length) };
     unsafe { scope.to_local(ptr) }.unwrap()
   }
 
-  pub fn new_with_backing_store<'sc>(
+  pub fn with_backing_store<'sc>(
     scope: &mut impl ToLocal<'sc>,
     backing_store: &mut SharedRef<BackingStore>,
   ) -> Local<'sc, ArrayBuffer> {
     let isolate = scope.isolate();
     let ptr = unsafe {
-      v8__ArrayBuffer__New__backing_store(isolate, &mut *backing_store)
+      v8__ArrayBuffer__New__with_backing_store(isolate, &mut *backing_store)
     };
     unsafe { scope.to_local(ptr) }.unwrap()
   }
@@ -200,6 +226,11 @@ impl ArrayBuffer {
   pub fn byte_length(&self) -> usize {
     unsafe { v8__ArrayBuffer__ByteLength(self) }
   }
+
+  /// Get a shared pointer to the backing store of this array buffer. This
+  /// pointer coordinates the lifetime management of the internal storage
+  /// with any live ArrayBuffers on the heap, even across isolates. The embedder
+  /// should not attempt to manage lifetime of the storage through other means.
   pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
     unsafe { v8__ArrayBuffer__GetBackingStore(self) }
   }
@@ -216,7 +247,7 @@ impl ArrayBuffer {
     byte_length: usize,
   ) -> UniqueRef<BackingStore> {
     unsafe {
-      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore(
+      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
         scope.isolate(),
         byte_length,
       ))
@@ -232,13 +263,14 @@ impl ArrayBuffer {
   /// to the buffer must not be passed again to any V8 API function.
   pub unsafe fn new_backing_store_from_boxed_slice(
     data: Box<[u8]>,
-  ) -> SharedRef<BackingStore> {
+  ) -> UniqueRef<BackingStore> {
     let byte_length = data.len();
-    let data_ptr = Box::into_raw(data) as *mut std::ffi::c_void;
-    v8__ArrayBuffer__NewBackingStore_FromRaw(
+    let data_ptr = Box::into_raw(data) as *mut c_void;
+    UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
       data_ptr,
       byte_length,
       backing_store_deleter_callback,
-    )
+      null_mut(),
+    ))
   }
 }
diff --git a/src/binding.cc b/src/binding.cc
index 63f33ccb..75386810 100644
--- a/src/binding.cc
+++ b/src/binding.cc
@@ -561,19 +561,19 @@ v8::Primitive* v8__PrimitiveArray__Get(v8::PrimitiveArray& self,
   return local_to_ptr(self.Get(isolate, index));
 }
 
-v8::BackingStore* v8__ArrayBuffer__NewBackingStore(v8::Isolate* isolate,
-                                                   size_t length) {
+v8::BackingStore* v8__ArrayBuffer__NewBackingStore__with_byte_length(
+    v8::Isolate* isolate, size_t byte_length) {
   std::unique_ptr<v8::BackingStore> u =
-      v8::ArrayBuffer::NewBackingStore(isolate, length);
+      v8::ArrayBuffer::NewBackingStore(isolate, byte_length);
   return u.release();
 }
 
-two_pointers_t v8__ArrayBuffer__NewBackingStore_FromRaw(
-    void* data, size_t length, v8::BackingStoreDeleterCallback deleter) {
-  std::unique_ptr<v8::BackingStore> u =
-      v8::ArrayBuffer::NewBackingStore(data, length, deleter, nullptr);
-  const std::shared_ptr<v8::BackingStore> bs = std::move(u);
-  return make_pod<two_pointers_t>(bs);
+v8::BackingStore* v8__ArrayBuffer__NewBackingStore__with_data(
+    void* data, size_t byte_length, v8::BackingStoreDeleterCallback deleter,
+    void* deleter_data) {
+  std::unique_ptr<v8::BackingStore> u = v8::ArrayBuffer::NewBackingStore(
+      data, byte_length, deleter, deleter_data);
+  return u.release();
 }
 
 two_pointers_t v8__ArrayBuffer__GetBackingStore(v8::ArrayBuffer& self) {
@@ -582,16 +582,21 @@ two_pointers_t v8__ArrayBuffer__GetBackingStore(v8::ArrayBuffer& self) {
 
 void* v8__BackingStore__Data(v8::BackingStore& self) { return self.Data(); }
 
-size_t v8__BackingStore__ByteLength(v8::BackingStore& self) {
+size_t v8__BackingStore__ByteLength(const v8::BackingStore& self) {
   return self.ByteLength();
 }
 
-bool v8__BackingStore__IsShared(v8::BackingStore& self) {
+bool v8__BackingStore__IsShared(const v8::BackingStore& self) {
   return self.IsShared();
 }
 
 void v8__BackingStore__DELETE(v8::BackingStore& self) { delete &self; }
 
+two_pointers_t std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
+    v8::BackingStore* ptr) {
+  return make_pod<two_pointers_t>(std::shared_ptr<v8::BackingStore>(ptr));
+}
+
 v8::BackingStore* std__shared_ptr__v8__BackingStore__get(
     const std::shared_ptr<v8::BackingStore>& ptr) {
   return ptr.get();
@@ -739,12 +744,12 @@ void v8__ArrayBuffer__Allocator__DELETE(v8::ArrayBuffer::Allocator& self) {
   delete &self;
 }
 
-v8::ArrayBuffer* v8__ArrayBuffer__New__byte_length(v8::Isolate* isolate,
-                                                   size_t byte_length) {
+v8::ArrayBuffer* v8__ArrayBuffer__New__with_byte_length(v8::Isolate* isolate,
+                                                        size_t byte_length) {
   return local_to_ptr(v8::ArrayBuffer::New(isolate, byte_length));
 }
 
-v8::ArrayBuffer* v8__ArrayBuffer__New__backing_store(
+v8::ArrayBuffer* v8__ArrayBuffer__New__with_backing_store(
     v8::Isolate* isolate, std::shared_ptr<v8::BackingStore>& backing_store) {
   return local_to_ptr(v8::ArrayBuffer::New(isolate, backing_store));
 }
@@ -1072,11 +1077,16 @@ v8::PrimitiveArray* v8__ScriptOrModule__GetHostDefinedOptions(
   return local_to_ptr(self.GetHostDefinedOptions());
 }
 
-v8::SharedArrayBuffer* v8__SharedArrayBuffer__New(v8::Isolate* isolate,
-                                                  size_t byte_length) {
+v8::SharedArrayBuffer* v8__SharedArrayBuffer__New__with_byte_length(
+    v8::Isolate* isolate, size_t byte_length) {
   return local_to_ptr(v8::SharedArrayBuffer::New(isolate, byte_length));
 }
 
+v8::SharedArrayBuffer* v8__SharedArrayBuffer__New__with_backing_store(
+    v8::Isolate* isolate, std::shared_ptr<v8::BackingStore>& backing_store) {
+  return local_to_ptr(v8::SharedArrayBuffer::New(isolate, backing_store));
+}
+
 size_t v8__SharedArrayBuffer__ByteLength(v8::SharedArrayBuffer& self) {
   return self.ByteLength();
 }
@@ -1086,17 +1096,19 @@ two_pointers_t v8__SharedArrayBuffer__GetBackingStore(
   return make_pod<two_pointers_t>(self.GetBackingStore());
 }
 
-two_pointers_t v8__SharedArrayBuffer__NewBackingStore_FromRaw(
-    void* data, size_t length, v8::BackingStoreDeleterCallback deleter) {
+v8::BackingStore* v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
+    v8::Isolate* isolate, size_t byte_length) {
   std::unique_ptr<v8::BackingStore> u =
-      v8::SharedArrayBuffer::NewBackingStore(data, length, deleter, nullptr);
-  const std::shared_ptr<v8::BackingStore> bs = std::move(u);
-  return make_pod<two_pointers_t>(bs);
+      v8::SharedArrayBuffer::NewBackingStore(isolate, byte_length);
+  return u.release();
 }
 
-v8::SharedArrayBuffer* v8__SharedArrayBuffer__New__backing_store(
-    v8::Isolate* isolate, std::shared_ptr<v8::BackingStore>& backing_store) {
-  return local_to_ptr(v8::SharedArrayBuffer::New(isolate, backing_store));
+v8::BackingStore* v8__SharedArrayBuffer__NewBackingStore__with_data(
+    void* data, size_t byte_length, v8::BackingStoreDeleterCallback deleter,
+    void* deleter_data) {
+  std::unique_ptr<v8::BackingStore> u = v8::SharedArrayBuffer::NewBackingStore(
+      data, byte_length, deleter, deleter_data);
+  return u.release();
 }
 
 v8::Value* v8__JSON__Parse(v8::Local<v8::Context> context,
diff --git a/src/shared_array_buffer.rs b/src/shared_array_buffer.rs
index dd370411..859f3b56 100644
--- a/src/shared_array_buffer.rs
+++ b/src/shared_array_buffer.rs
@@ -1,21 +1,27 @@
+// Copyright 2019-2020 the Deno authors. All rights reserved. MIT license.
+
+use std::ffi::c_void;
+use std::ptr::null_mut;
+
 use crate::array_buffer::backing_store_deleter_callback;
 use crate::support::SharedRef;
+use crate::support::UniqueRef;
 use crate::BackingStore;
 use crate::BackingStoreDeleterCallback;
+use crate::InIsolate;
 use crate::Isolate;
 use crate::Local;
 use crate::SharedArrayBuffer;
 use crate::ToLocal;
 
 extern "C" {
-  fn v8__SharedArrayBuffer__New(
+  fn v8__SharedArrayBuffer__New__with_byte_length(
     isolate: *mut Isolate,
     byte_length: usize,
   ) -> *mut SharedArrayBuffer;
-  fn v8__SharedArrayBuffer__New__DEPRECATED(
+  fn v8__SharedArrayBuffer__New__with_backing_store(
     isolate: *mut Isolate,
-    data_ptr: *mut std::ffi::c_void,
-    data_length: usize,
+    backing_store: *mut SharedRef<BackingStore>,
   ) -> *mut SharedArrayBuffer;
   fn v8__SharedArrayBuffer__ByteLength(
     self_: *const SharedArrayBuffer,
@@ -23,15 +29,16 @@ extern "C" {
   fn v8__SharedArrayBuffer__GetBackingStore(
     self_: *const SharedArrayBuffer,
   ) -> SharedRef<BackingStore>;
-  fn v8__SharedArrayBuffer__NewBackingStore_FromRaw(
-    data: *mut std::ffi::c_void,
+  fn v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
+    isolate: *mut Isolate,
+    byte_length: usize,
+  ) -> *mut BackingStore;
+  fn v8__SharedArrayBuffer__NewBackingStore__with_data(
+    data: *mut c_void,
     byte_length: usize,
     deleter: BackingStoreDeleterCallback,
-  ) -> SharedRef<BackingStore>;
-  fn v8__SharedArrayBuffer__New__backing_store(
-    isolate: *mut Isolate,
-    backing_store: *mut SharedRef<BackingStore>,
-  ) -> *mut SharedArrayBuffer;
+    deleter_data: *mut c_void,
+  ) -> *mut BackingStore;
 }
 
 impl SharedArrayBuffer {
@@ -44,10 +51,27 @@ impl SharedArrayBuffer {
     byte_length: usize,
   ) -> Option<Local<'sc, SharedArrayBuffer>> {
     unsafe {
-      Local::from_raw(v8__SharedArrayBuffer__New(scope.isolate(), byte_length))
+      Local::from_raw(v8__SharedArrayBuffer__New__with_byte_length(
+        scope.isolate(),
+        byte_length,
+      ))
     }
   }
 
+  pub fn with_backing_store<'sc>(
+    scope: &mut impl ToLocal<'sc>,
+    backing_store: &mut SharedRef<BackingStore>,
+  ) -> Local<'sc, SharedArrayBuffer> {
+    let isolate = scope.isolate();
+    let ptr = unsafe {
+      v8__SharedArrayBuffer__New__with_backing_store(
+        isolate,
+        &mut *backing_store,
+      )
+    };
+    unsafe { scope.to_local(ptr) }.unwrap()
+  }
+
   /// Data length in bytes.
   pub fn byte_length(&self) -> usize {
     unsafe { v8__SharedArrayBuffer__ByteLength(self) }
@@ -61,15 +85,25 @@ impl SharedArrayBuffer {
     unsafe { v8__SharedArrayBuffer__GetBackingStore(self) }
   }
 
-  pub fn new_with_backing_store<'sc>(
-    scope: &mut impl ToLocal<'sc>,
-    backing_store: &mut SharedRef<BackingStore>,
-  ) -> Local<'sc, SharedArrayBuffer> {
-    let isolate = scope.isolate();
-    let ptr = unsafe {
-      v8__SharedArrayBuffer__New__backing_store(isolate, &mut *backing_store)
-    };
-    unsafe { scope.to_local(ptr) }.unwrap()
+  /// Returns a new standalone BackingStore that is allocated using the array
+  /// buffer allocator of the isolate. The result can be later passed to
+  /// SharedArrayBuffer::New.
+  ///
+  /// If the allocator returns nullptr, then the function may cause GCs in the
+  /// given isolate and re-try the allocation. If GCs do not help, then the
+  /// function will crash with an out-of-memory error.
+  pub fn new_backing_store(
+    scope: &mut impl InIsolate,
+    byte_length: usize,
+  ) -> UniqueRef<BackingStore> {
+    unsafe {
+      UniqueRef::from_raw(
+        v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
+          scope.isolate(),
+          byte_length,
+        ),
+      )
+    }
   }
 
   /// Returns a new standalone BackingStore that takes over the ownership of
@@ -81,13 +115,14 @@ impl SharedArrayBuffer {
   /// to the buffer must not be passed again to any V8 API function.
   pub unsafe fn new_backing_store_from_boxed_slice(
     data: Box<[u8]>,
-  ) -> SharedRef<BackingStore> {
+  ) -> UniqueRef<BackingStore> {
     let byte_length = data.len();
-    let data_ptr = Box::into_raw(data) as *mut std::ffi::c_void;
-    v8__SharedArrayBuffer__NewBackingStore_FromRaw(
+    let data_ptr = Box::into_raw(data) as *mut c_void;
+    UniqueRef::from_raw(v8__SharedArrayBuffer__NewBackingStore__with_data(
       data_ptr,
       byte_length,
       backing_store_deleter_callback,
-    )
+      null_mut(),
+    ))
   }
 }
diff --git a/src/support.rs b/src/support.rs
index 209be17d..7650f852 100644
--- a/src/support.rs
+++ b/src/support.rs
@@ -57,6 +57,15 @@ where
   }
 }
 
+impl<T> From<UniqueRef<T>> for UniquePtr<T>
+where
+  T: Delete,
+{
+  fn from(unique_ref: UniqueRef<T>) -> Self {
+    unsafe { Self::from_raw(unique_ref.into_raw()) }
+  }
+}
+
 impl<T> Deref for UniquePtr<T>
 where
   T: Delete,
@@ -101,6 +110,13 @@ where
     Self(r)
   }
 
+  pub fn make_shared(self) -> SharedRef<T>
+  where
+    T: Shared,
+  {
+    self.into()
+  }
+
   pub unsafe fn from_raw(p: *mut T) -> Self {
     transmute(NonNull::new(p))
   }
@@ -114,9 +130,9 @@ impl<T> Deref for UniqueRef<T>
 where
   T: Delete,
 {
-  type Target = &'static mut T;
+  type Target = T;
   fn deref(&self) -> &Self::Target {
-    &self.0
+    self.0
   }
 }
 
@@ -125,7 +141,7 @@ where
   T: Delete,
 {
   fn deref_mut(&mut self) -> &mut Self::Target {
-    &mut self.0
+    self.0
   }
 }
 
@@ -143,8 +159,9 @@ where
 
 pub trait Shared
 where
-  Self: Sized + 'static,
+  Self: Delete + 'static,
 {
+  fn from_unique(unique: UniqueRef<Self>) -> SharedRef<Self>;
   fn deref(shared_ptr: *const SharedRef<Self>) -> *mut Self;
   fn reset(shared_ptr: *mut SharedRef<Self>);
   fn use_count(shared_ptr: *const SharedRef<Self>) -> long;
@@ -157,6 +174,8 @@ pub struct SharedRef<T>([*mut Opaque; 2], PhantomData<T>)
 where
   T: Shared;
 
+unsafe impl<T> Send for SharedRef<T> where T: Shared + Send {}
+
 impl<T> SharedRef<T>
 where
   T: Shared,
@@ -166,7 +185,14 @@ where
   }
 }
 
-unsafe impl<T> Send for SharedRef<T> where T: Shared + Send {}
+impl<T> From<UniqueRef<T>> for SharedRef<T>
+where
+  T: Delete + Shared,
+{
+  fn from(unique: UniqueRef<T>) -> Self {
+    <T as Shared>::from_unique(unique)
+  }
+}
 
 impl<T> Deref for SharedRef<T>
 where
diff --git a/tests/test_api.rs b/tests/test_api.rs
index d27b34b2..81512e70 100644
--- a/tests/test_api.rs
+++ b/tests/test_api.rs
@@ -257,16 +257,24 @@ fn array_buffer() {
     assert_eq!(false, bs.is_shared());
 
     let data: Box<[u8]> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9].into_boxed_slice();
-    let mut bs =
+    let unique_bs =
       unsafe { v8::ArrayBuffer::new_backing_store_from_boxed_slice(data) };
-    assert_eq!(10, bs.byte_length());
-    assert_eq!(false, bs.is_shared());
-    let ab = v8::ArrayBuffer::new_with_backing_store(scope, &mut bs);
-    let mut bs = ab.get_backing_store();
+    assert_eq!(10, unique_bs.byte_length());
+    assert_eq!(false, unique_bs.is_shared());
+    assert_eq!(unique_bs[0], 0);
+    assert_eq!(unique_bs[9], 9);
+
+    let mut shared_bs = unique_bs.make_shared();
+    assert_eq!(10, shared_bs.byte_length());
+    assert_eq!(false, shared_bs.is_shared());
+    assert_eq!(shared_bs[0], 0);
+    assert_eq!(shared_bs[9], 9);
+
+    let ab = v8::ArrayBuffer::with_backing_store(scope, &mut shared_bs);
+    let bs = ab.get_backing_store();
     assert_eq!(10, ab.byte_length());
-    let data = bs.data_bytes();
-    assert_eq!(data[0], 0);
-    assert_eq!(data[9], 9);
+    assert_eq!(bs[0], 0);
+    assert_eq!(bs[9], 9);
   }
 }
 
@@ -311,7 +319,7 @@ fn array_buffer_with_shared_backing_store() {
     drop(bs1);
     assert_eq!(2, v8::SharedRef::use_count(&bs3));
 
-    let ab2 = v8::ArrayBuffer::new_with_backing_store(scope, &mut bs3);
+    let ab2 = v8::ArrayBuffer::with_backing_store(scope, &mut bs3);
     assert_eq!(ab1.byte_length(), ab2.byte_length());
     assert_eq!(3, v8::SharedRef::use_count(&bs3));
 
@@ -1847,9 +1855,8 @@ fn shared_array_buffer() {
     assert!(maybe_sab.is_some());
     let sab = maybe_sab.unwrap();
     let mut backing_store = sab.get_backing_store();
-    let shared_buf = backing_store.data_bytes();
-    shared_buf[5] = 12;
-    shared_buf[12] = 52;
+    backing_store[5] = 12;
+    backing_store[12] = 52;
     let global = context.global(scope);
     assert_eq!(
       global.create_data_property(
@@ -1872,21 +1879,25 @@ fn shared_array_buffer() {
     let result: v8::Local<v8::Integer> =
       script.run(scope, context).unwrap().try_into().unwrap();
     assert_eq!(result.value(), 64);
-    assert_eq!(shared_buf[2], 16);
-    assert_eq!(shared_buf[14], 62);
+    assert_eq!(backing_store[2], 16);
+    assert_eq!(backing_store[14], 62);
 
     let data: Box<[u8]> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9].into_boxed_slice();
-    let mut bs = unsafe {
+    let bs = unsafe {
      v8::SharedArrayBuffer::new_backing_store_from_boxed_slice(data)
     };
     assert_eq!(10, bs.byte_length());
    assert_eq!(true, bs.is_shared());
-    let ab = v8::SharedArrayBuffer::new_with_backing_store(scope, &mut bs);
-    let mut bs = ab.get_backing_store();
+
+    let mut bs = bs.make_shared();
+    assert_eq!(10, bs.byte_length());
+    assert_eq!(true, bs.is_shared());
+
+    let ab = v8::SharedArrayBuffer::with_backing_store(scope, &mut bs);
+    let bs = ab.get_backing_store();
     assert_eq!(10, ab.byte_length());
-    let data = bs.data_bytes();
-    assert_eq!(data[0], 0);
-    assert_eq!(data[9], 9);
+    assert_eq!(bs[0], 0);
+    assert_eq!(bs[9], 9);
   }
 }
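
After this patch the ownership model mirrors the C++ API: allocation functions hand back a UniqueRef<BackingStore> (the analogue of std::unique_ptr<v8::BackingStore>), and conversion to shared ownership (std::shared_ptr) is an explicit make_shared() call. The sketch below is distilled from the updated tests; `example` and its parameter are illustrative names only, and the isolate/handle-scope setup is assumed to match the helpers in tests/test_api.rs.

// Illustrative only; not part of the diff above. Assumes a rusty_v8 build at
// this commit, with `scope` prepared as in tests/test_api.rs.
fn example<'sc>(scope: &mut impl v8::ToLocal<'sc>) {
  let data: Box<[u8]> = vec![1, 2, 3].into_boxed_slice();

  // Unique ownership first, like std::unique_ptr<v8::BackingStore> in C++.
  let unique_bs =
    unsafe { v8::ArrayBuffer::new_backing_store_from_boxed_slice(data) };

  // BackingStore now derefs to [u8], so the contents are directly readable.
  assert_eq!(unique_bs[0], 1);

  // Shared ownership (std::shared_ptr in C++) is an explicit conversion.
  let mut shared_bs = unique_bs.make_shared();

  // Constructors that consume a backing store drop the "new_" prefix.
  let ab = v8::ArrayBuffer::with_backing_store(scope, &mut shared_bs);
  assert_eq!(ab.byte_length(), 3);
}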