Generated by Cython 0.29.22
Yellow lines hint at Python interaction.
Click on a line that starts with a "+" to see the C code that Cython generated for it.
Raw output: _c.c
+0001: # cython: language_level = 3
__pyx_t_1 = __Pyx_PyDict_NewPresized(42); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory___str___line_3290, __pyx_kp_u_String_representation_If_attr_co) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory___eq___line_3383, __pyx_kp_u_Equality_comparison_Arguments_ot) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_count_line_3919, __pyx_kp_u_Counts_items_Arguments_item_item) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory___getitem___line_3979, __pyx_kp_u_Gets_data_Arguments_key_slice_or) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory___setitem___line_4063, __pyx_kp_u_Sets_data_Arguments_key_slice_or) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory___delitem___line_4242, __pyx_kp_u_Deletes_data_Arguments_key_slice) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_append_line_4344, __pyx_kp_u_Appends_a_single_item_Arguments) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_pop_line_4462, __pyx_kp_u_Takes_a_value_away_Arguments_add) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_start___get___line_4779, __pyx_kp_u_int_Inclusive_start_address_This) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_endex___get___line_4838, __pyx_kp_u_int_Exclusive_end_address_This_p) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_span___get___line_4886, __pyx_kp_u_tuple_of_int_Memory_address_span) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_endin___get___line_4915, __pyx_kp_u_int_Inclusive_end_address_This_p) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, 
__pyx_kp_u_Memory_content_start___get___lin, __pyx_kp_u_int_Inclusive_content_start_addr) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_content_endex___get___lin, __pyx_kp_u_int_Exclusive_content_end_addres) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_content_span___get___line, __pyx_kp_u_tuple_of_int_Memory_content_addr) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_content_endin___get___lin, __pyx_kp_u_int_Inclusive_content_end_addres) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_content_size___get___line, __pyx_kp_u_Actual_content_size_Returns_int) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_content_parts___get___lin, __pyx_kp_u_Number_of_blocks_Returns_int_The) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_bound_line_5380, __pyx_kp_u_Bounds_addresses_It_bounds_the_g) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory__block_index_at_line_5450, __pyx_kp_u_Locates_the_block_enclosing_an_a) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory__block_index_start_line_5, __pyx_kp_u_Locates_the_first_block_inside_o) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory__block_index_endex_line_5, __pyx_kp_u_Locates_the_first_block_after_an) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_peek_line_5565, __pyx_kp_u_Gets_the_item_at_an_address_Retu) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_poke_line_5677, __pyx_kp_u_Sets_the_item_at_an_address_Argu) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_extract_line_5817, __pyx_kp_u_Selects_items_from_a_range_Argum) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, 
__pyx_kp_u_Memory_shift_line_5934, __pyx_kp_u_Shifts_the_items_Arguments_offse) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_reserve_line_6033, __pyx_kp_u_Inserts_emptiness_Reserves_empti) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_insert_line_6370, __pyx_kp_u_Inserts_data_Inserts_data_moving) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_delete_line_6441, __pyx_kp_u_Deletes_an_address_range_Argumen) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_clear_line_6490, __pyx_kp_u_Clears_an_address_range_Argument) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_crop_line_6681, __pyx_kp_u_Keeps_data_within_an_address_ran) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_write_line_6816, __pyx_kp_u_Writes_data_Arguments_address_in) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_fill_line_6903, __pyx_kp_u_Overwrites_a_range_with_a_patter) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_flood_line_7039, __pyx_kp_u_Fills_emptiness_between_non_touc) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_keys_line_7107, __pyx_kp_u_Iterates_over_addresses_Iterates) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_values_line_7180, __pyx_kp_u_Iterates_over_values_Iterates_ov) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_rvalues_line_7253, __pyx_kp_u_Iterates_over_values_reversed_or) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_items_line_7325, __pyx_kp_u_Iterates_over_address_and_value) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_intervals_line_7383, __pyx_kp_u_Iterates_over_block_intervals_It) < 
0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_gaps_line_7447, __pyx_kp_u_Iterates_over_block_gaps_Iterate) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_equal_span_line_7537, __pyx_kp_u_Span_of_homogeneous_data_It_sear) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_1, __pyx_kp_u_Memory_block_span_line_7653, __pyx_kp_u_Span_of_block_data_It_searches_f) < 0) __PYX_ERR(0, 1, __pyx_L1_error) if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
0002: # cython: embedsignature = True
0003:
0004: # Copyright (c) 2020-2021, Andrea Zoppi.
0005: # All rights reserved.
0006: #
0007: # Redistribution and use in source and binary forms, with or without
0008: # modification, are permitted provided that the following conditions are met:
0009: #
0010: # 1. Redistributions of source code must retain the above copyright notice,
0011: # this list of conditions and the following disclaimer.
0012: #
0013: # 2. Redistributions in binary form must reproduce the above copyright
0014: # notice, this list of conditions and the following disclaimer in the
0015: # documentation and/or other materials provided with the distribution.
0016: #
0017: # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
0018: # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
0019: # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
0020: # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
0021: # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
0022: # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
0023: # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
0024: # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
0025: # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
0026: # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
0027: # POSSIBILITY OF SUCH DAMAGE.
0028:
0029: cimport cython
0030: from cpython.bytearray cimport PyByteArray_FromStringAndSize
0031: from cpython.bytes cimport PyBytes_FromStringAndSize
0032:
+0033: from itertools import islice as _islice
__pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_s_islice_2); __Pyx_GIVEREF(__pyx_n_s_islice_2); PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_islice_2); __pyx_t_2 = __Pyx_Import(__pyx_n_s_itertools, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 33, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_islice_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_islice, __pyx_t_1) < 0) __PYX_ERR(0, 33, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+0034: from itertools import zip_longest as _zip_longest
__pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 34, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_n_s_zip_longest_2); __Pyx_GIVEREF(__pyx_n_s_zip_longest_2); PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_zip_longest_2); __pyx_t_1 = __Pyx_Import(__pyx_n_s_itertools, __pyx_t_2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_zip_longest_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 34, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_zip_longest, __pyx_t_2) < 0) __PYX_ERR(0, 34, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
0035:
+0036: STR_MAX_CONTENT_SIZE: Address = 1000
if (PyDict_SetItem(__pyx_d, __pyx_n_s_STR_MAX_CONTENT_SIZE, __pyx_int_1000) < 0) __PYX_ERR(0, 36, __pyx_L1_error)
0037:
0038:
0039: # =====================================================================================================================
0040:
+0041: cdef void* PyMem_Calloc(size_t nelem, size_t elsize, bint zero):
static void *__pyx_f_10bytesparse_2_c_PyMem_Calloc(size_t __pyx_v_nelem, size_t __pyx_v_elsize, int __pyx_v_zero) { void *__pyx_v_ptr; size_t __pyx_v_total; void *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("PyMem_Calloc", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
0042: cdef:
0043: void* ptr
+0044: size_t total = nelem * elsize
__pyx_v_total = (__pyx_v_nelem * __pyx_v_elsize);
0045:
+0046: if CannotMulSizeU(nelem, elsize):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotMulSizeU(__pyx_v_nelem, __pyx_v_elsize) != 0); if (__pyx_t_1) { /* … */ }
+0047: return NULL # overflow
__pyx_r = NULL; goto __pyx_L0;
0048:
+0049: ptr = PyMem_Malloc(total)
__pyx_v_ptr = PyMem_Malloc(__pyx_v_total);
+0050: if ptr and zero:
__pyx_t_2 = (__pyx_v_ptr != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L5_bool_binop_done; } __pyx_t_2 = (__pyx_v_zero != 0); __pyx_t_1 = __pyx_t_2; __pyx_L5_bool_binop_done:; if (__pyx_t_1) { /* … */ }
+0051: memset(ptr, 0, total)
(void)(memset(__pyx_v_ptr, 0, __pyx_v_total));
+0052: return ptr
__pyx_r = __pyx_v_ptr; goto __pyx_L0;
0053:
0054:
0055: # =====================================================================================================================
0056:
+0057: cdef size_t Downsize(size_t allocated, size_t requested) nogil:
static size_t __pyx_f_10bytesparse_2_c_Downsize(size_t __pyx_v_allocated, size_t __pyx_v_requested) { size_t __pyx_v_resized; size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0058: # Note: free margin will be either before and after allocated data
0059: cdef size_t resized
0060:
+0061: if requested < allocated >> 1:
__pyx_t_1 = ((__pyx_v_requested < (__pyx_v_allocated >> 1)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
0062: # Major downsize; allocate as per request
+0063: resized = requested
__pyx_v_resized = __pyx_v_requested;
0064:
0065: # Align to next MARGIN; always gives some additional MARGIN
0066: with cython.cdivision(True):
+0067: resized += (2 * MARGIN) - (resized % MARGIN)
__pyx_v_resized = (__pyx_v_resized + ((2 * MARGIN) - (__pyx_v_resized % MARGIN)));
0068: else:
0069: # Do not require reallocation
+0070: resized = allocated
/*else*/ { __pyx_v_resized = __pyx_v_allocated;
0071:
0072: # Align to next MARGIN; always gives some additional MARGIN
+0073: if resized < 2 * MARGIN:
__pyx_t_1 = ((__pyx_v_resized < (2 * MARGIN)) != 0); if (__pyx_t_1) { /* … */ } } __pyx_L3:;
+0074: resized = 2 * MARGIN
__pyx_v_resized = (2 * MARGIN);
0075:
+0076: return resized
__pyx_r = __pyx_v_resized; goto __pyx_L0;
0077:
0078:
+0079: cdef size_t Upsize(size_t allocated, size_t requested) nogil:
static size_t __pyx_f_10bytesparse_2_c_Upsize(size_t __pyx_v_allocated, size_t __pyx_v_requested) { size_t __pyx_v_resized; size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0080: # Note: free margin will be either before and after allocated data
+0081: cdef size_t resized = requested
__pyx_v_resized = __pyx_v_requested;
0082:
0083: # Moderate upsize; overallocate proportionally
+0084: if resized <= allocated + (allocated >> 3):
__pyx_t_1 = ((__pyx_v_resized <= (__pyx_v_allocated + (__pyx_v_allocated >> 3))) != 0); if (__pyx_t_1) { /* … */ }
+0085: resized += resized >> 3
__pyx_v_resized = (__pyx_v_resized + (__pyx_v_resized >> 3));
0086:
0087: # Align to next MARGIN; always gives some additional MARGIN
0088: with cython.cdivision(True):
+0089: resized += (2 * MARGIN) - (resized % MARGIN)
__pyx_v_resized = (__pyx_v_resized + ((2 * MARGIN) - (__pyx_v_resized % MARGIN)));
+0090: return resized
__pyx_r = __pyx_v_resized; goto __pyx_L0;
0091:
0092:
0093: # ---------------------------------------------------------------------------------------------------------------------
0094:
+0095: cdef void Reverse(byte_t* buffer, size_t start, size_t endin) nogil:
static void __pyx_f_10bytesparse_2_c_Reverse(byte_t *__pyx_v_buffer, size_t __pyx_v_start, size_t __pyx_v_endin) { byte_t __pyx_v_t; /* … */ /* function exit code */ }
0096: cdef:
0097: byte_t t
0098:
+0099: while start < endin:
while (1) { __pyx_t_1 = ((__pyx_v_start < __pyx_v_endin) != 0); if (!__pyx_t_1) break;
+0100: t = buffer[start]
__pyx_v_t = (__pyx_v_buffer[__pyx_v_start]);
+0101: buffer[start] = buffer[endin]
(__pyx_v_buffer[__pyx_v_start]) = (__pyx_v_buffer[__pyx_v_endin]);
+0102: buffer[endin] = t
(__pyx_v_buffer[__pyx_v_endin]) = __pyx_v_t;
+0103: start += 1
__pyx_v_start = (__pyx_v_start + 1);
+0104: endin -= 1
__pyx_v_endin = (__pyx_v_endin - 1); }
0105:
0106:
+0107: cdef bint IsSequence(object obj) except -1:
static int __pyx_f_10bytesparse_2_c_IsSequence(PyObject *__pyx_v_obj) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("IsSequence", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("bytesparse._c.IsSequence", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0108: try:
{ /*try:*/ { /* … */ } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L1_error; __pyx_L7_try_return:; __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L0; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L0; }
+0109: len(obj)
__pyx_t_4 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 109, __pyx_L3_error)
+0110: obj[0:0]
__pyx_t_5 = __Pyx_PyObject_GetSlice(__pyx_v_obj, 0, 0, NULL, NULL, &__pyx_slice_, 1, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 110, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; /* … */ __pyx_slice_ = PySlice_New(__pyx_int_0, __pyx_int_0, Py_None); if (unlikely(!__pyx_slice_)) __PYX_ERR(0, 110, __pyx_L1_error) __Pyx_GOTREF(__pyx_slice_); __Pyx_GIVEREF(__pyx_slice_);
+0111: return True
__pyx_r = 1; goto __pyx_L7_try_return;
+0112: except TypeError:
__pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_TypeError); if (__pyx_t_6) { __Pyx_AddTraceback("bytesparse._c.IsSequence", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 112, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GOTREF(__pyx_t_7); __Pyx_GOTREF(__pyx_t_8);
+0113: return False
__pyx_r = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; goto __pyx_L6_except_return; } goto __pyx_L5_except_error; __pyx_L5_except_error:;
0114:
0115:
0116: # =====================================================================================================================
0117:
+0118: cdef bint CannotAddSizeU(size_t a, size_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotAddSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0119: return SIZE_MAX - a < b
__pyx_r = ((SIZE_MAX - __pyx_v_a) < __pyx_v_b); goto __pyx_L0;
0120:
0121:
+0122: cdef vint CheckAddSizeU(size_t a, size_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckAddSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckAddSizeU", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckAddSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0123: if CannotAddSizeU(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddSizeU(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0124: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 124, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 124, __pyx_L1_error)
0125:
0126:
+0127: cdef size_t AddSizeU(size_t a, size_t b) except? 0xDEAD:
static size_t __pyx_f_10bytesparse_2_c_AddSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { size_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("AddSizeU", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.AddSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0128: CheckAddSizeU(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 128, __pyx_L1_error)
+0129: return a + b
__pyx_r = (__pyx_v_a + __pyx_v_b); goto __pyx_L0;
0130:
0131:
+0132: cdef bint CannotSubSizeU(size_t a, size_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotSubSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0133: return a < b
__pyx_r = (__pyx_v_a < __pyx_v_b); goto __pyx_L0;
0134:
0135:
+0136: cdef vint CheckSubSizeU(size_t a, size_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckSubSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckSubSizeU", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckSubSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0137: if CannotSubSizeU(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotSubSizeU(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0138: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 138, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 138, __pyx_L1_error)
0139:
0140:
+0141: cdef size_t SubSizeU(size_t a, size_t b) except? 0xDEAD:
static size_t __pyx_f_10bytesparse_2_c_SubSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { size_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("SubSizeU", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.SubSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0142: CheckSubSizeU(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckSubSizeU(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 142, __pyx_L1_error)
+0143: return a - b
__pyx_r = (__pyx_v_a - __pyx_v_b); goto __pyx_L0;
0144:
0145:
+0146: cdef bint CannotMulSizeU(size_t a, size_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotMulSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { size_t __pyx_v_r; int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0147: cdef:
+0148: size_t r = a * b
__pyx_v_r = (__pyx_v_a * __pyx_v_b);
+0149: return a and b and (r < a or r < b)
__pyx_t_2 = (__pyx_v_a != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = (__pyx_v_b != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_r < __pyx_v_a) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_r < __pyx_v_b) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L3_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0;
0150:
0151:
+0152: cdef vint CheckMulSizeU(size_t a, size_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckMulSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckMulSizeU", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckMulSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0153: if CannotMulSizeU(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotMulSizeU(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0154: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 154, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 154, __pyx_L1_error)
0155:
0156:
+0157: cdef size_t MulSizeU(size_t a, size_t b) except? 0xDEAD:
static size_t __pyx_f_10bytesparse_2_c_MulSizeU(size_t __pyx_v_a, size_t __pyx_v_b) { size_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("MulSizeU", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.MulSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0158: CheckMulSizeU(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckMulSizeU(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 158, __pyx_L1_error)
+0159: return a * b
__pyx_r = (__pyx_v_a * __pyx_v_b); goto __pyx_L0;
0160:
0161:
0162: # ---------------------------------------------------------------------------------------------------------------------
0163:
+0164: cdef bint CannotAddSizeS(ssize_t a, ssize_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotAddSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0165: return ((b > 0 and a > SSIZE_MAX - b) or
__pyx_t_2 = ((__pyx_v_b > 0) != 0); if (!__pyx_t_2) { goto __pyx_L4_next_or; } else { } __pyx_t_2 = ((__pyx_v_a > (SSIZE_MAX - __pyx_v_b)) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_L4_next_or:;
+0166: (b < 0 and a < SSIZE_MIN - b))
__pyx_t_2 = ((__pyx_v_b < 0) != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_a < (SSIZE_MIN - __pyx_v_b)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L3_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0;
0167:
0168:
+0169: cdef vint CheckAddSizeS(ssize_t a, ssize_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckAddSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckAddSizeS", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckAddSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0170: if CannotAddSizeS(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddSizeS(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0171: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 171, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 171, __pyx_L1_error)
0172:
0173:
+0174: cdef ssize_t AddSizeS(ssize_t a, ssize_t b) except? 0xDEAD:
static Py_ssize_t __pyx_f_10bytesparse_2_c_AddSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("AddSizeS", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.AddSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0175: CheckAddSizeS(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeS(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 175, __pyx_L1_error)
+0176: return a + b
__pyx_r = (__pyx_v_a + __pyx_v_b); goto __pyx_L0;
0177:
0178:
+0179: cdef bint CannotSubSizeS(ssize_t a, ssize_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotSubSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0180: return ((b > 0 and a < SSIZE_MIN + b) or
__pyx_t_2 = ((__pyx_v_b > 0) != 0); if (!__pyx_t_2) { goto __pyx_L4_next_or; } else { } __pyx_t_2 = ((__pyx_v_a < (SSIZE_MIN + __pyx_v_b)) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_L4_next_or:;
+0181: (b < 0 and a > SSIZE_MAX + b))
__pyx_t_2 = ((__pyx_v_b < 0) != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_a > (SSIZE_MAX + __pyx_v_b)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L3_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0;
0182:
0183:
+0184: cdef vint CheckSubSizeS(ssize_t a, ssize_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckSubSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckSubSizeS", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckSubSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0185: if CannotSubSizeS(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotSubSizeS(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0186: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 186, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 186, __pyx_L1_error)
0187:
0188:
+0189: cdef ssize_t SubSizeS(ssize_t a, ssize_t b) except? 0xDEAD:
static Py_ssize_t __pyx_f_10bytesparse_2_c_SubSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("SubSizeS", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.SubSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0190: CheckSubSizeS(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckSubSizeS(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 190, __pyx_L1_error)
+0191: return a - b
__pyx_r = (__pyx_v_a - __pyx_v_b); goto __pyx_L0;
0192:
0193:
+0194: cdef bint CannotMulSizeS(ssize_t a, ssize_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotMulSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0195: with cython.cdivision(True):
+0196: if a > 0:
__pyx_t_1 = ((__pyx_v_a > 0) != 0); if (__pyx_t_1) { /* … */ }
+0197: if b > 0:
__pyx_t_1 = ((__pyx_v_b > 0) != 0); if (__pyx_t_1) { /* … */ }
+0198: return a > (SSIZE_MAX // b)
__pyx_r = (__pyx_v_a > (SSIZE_MAX / __pyx_v_b)); goto __pyx_L0;
0199: else:
+0200: return b < (SSIZE_MIN // a)
/*else*/ { __pyx_r = (__pyx_v_b < (SSIZE_MIN / __pyx_v_a)); goto __pyx_L0; }
0201: else:
+0202: if b > 0:
/*else*/ { __pyx_t_1 = ((__pyx_v_b > 0) != 0); if (__pyx_t_1) { /* … */ }
+0203: return a < (SSIZE_MIN // b)
__pyx_r = (__pyx_v_a < (SSIZE_MIN / __pyx_v_b)); goto __pyx_L0;
0204: else:
+0205: return a and b < (SSIZE_MAX // a)
/*else*/ { __pyx_t_2 = (__pyx_v_a != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L6_bool_binop_done; } __pyx_t_2 = ((__pyx_v_b < (SSIZE_MAX / __pyx_v_a)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L6_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0; } }
0206:
0207:
+0208: cdef vint CheckMulSizeS(ssize_t a, ssize_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckMulSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckMulSizeS", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckMulSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0209: if CannotMulSizeS(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotMulSizeS(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0210: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 210, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 210, __pyx_L1_error)
0211:
0212:
+0213: cdef ssize_t MulSizeS(ssize_t a, ssize_t b) except? 0xDEAD:
static Py_ssize_t __pyx_f_10bytesparse_2_c_MulSizeS(Py_ssize_t __pyx_v_a, Py_ssize_t __pyx_v_b) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("MulSizeS", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.MulSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0214: CheckMulSizeS(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckMulSizeS(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 214, __pyx_L1_error)
+0215: return a * b
__pyx_r = (__pyx_v_a * __pyx_v_b); goto __pyx_L0;
0216:
0217:
0218: # =====================================================================================================================
0219:
+0220: cdef bint CannotAddAddrU(addr_t a, addr_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotAddAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0221: return ADDR_MAX - a < b
__pyx_r = ((ADDR_MAX - __pyx_v_a) < __pyx_v_b); goto __pyx_L0;
0222:
0223:
+0224: cdef vint CheckAddAddrU(addr_t a, addr_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckAddAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckAddAddrU", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckAddAddrU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0225: if CannotAddAddrU(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddAddrU(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0226: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 226, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 226, __pyx_L1_error)
0227:
0228:
+0229: cdef addr_t AddAddrU(addr_t a, addr_t b) except? 0xDEAD:
static addr_t __pyx_f_10bytesparse_2_c_AddAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("AddAddrU", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.AddAddrU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0230: CheckAddAddrU(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 230, __pyx_L1_error)
+0231: return a + b
__pyx_r = (__pyx_v_a + __pyx_v_b); goto __pyx_L0;
0232:
0233:
+0234: cdef bint CannotSubAddrU(addr_t a, addr_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotSubAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0235: return a < b
__pyx_r = (__pyx_v_a < __pyx_v_b); goto __pyx_L0;
0236:
0237:
+0238: cdef vint CheckSubAddrU(addr_t a, addr_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckSubAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckSubAddrU", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckSubAddrU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0239: if CannotSubAddrU(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotSubAddrU(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0240: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 240, __pyx_L1_error)
0241:
0242:
+0243: cdef addr_t SubAddrU(addr_t a, addr_t b) except? 0xDEAD:
static addr_t __pyx_f_10bytesparse_2_c_SubAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("SubAddrU", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.SubAddrU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0244: CheckSubAddrU(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 244, __pyx_L1_error)
+0245: return a - b
__pyx_r = (__pyx_v_a - __pyx_v_b); goto __pyx_L0;
0246:
0247:
+0248: cdef bint CannotMulAddrU(addr_t a, addr_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotMulAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { addr_t __pyx_v_r; int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0249: cdef:
+0250: addr_t r = a * b
__pyx_v_r = (__pyx_v_a * __pyx_v_b);
+0251: return a and b and (r < a or r < b)
__pyx_t_2 = (__pyx_v_a != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = (__pyx_v_b != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_r < __pyx_v_a) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_r < __pyx_v_b) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L3_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0;
0252:
0253:
+0254: cdef vint CheckMulAddrU(addr_t a, addr_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckMulAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckMulAddrU", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckMulAddrU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0255: if CannotMulAddrU(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotMulAddrU(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0256: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 256, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 256, __pyx_L1_error)
0257:
0258:
+0259: cdef addr_t MulAddrU(addr_t a, addr_t b) except? 0xDEAD:
static addr_t __pyx_f_10bytesparse_2_c_MulAddrU(addr_t __pyx_v_a, addr_t __pyx_v_b) { addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("MulAddrU", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.MulAddrU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0260: CheckMulAddrU(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckMulAddrU(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 260, __pyx_L1_error)
+0261: return a * b
__pyx_r = (__pyx_v_a * __pyx_v_b); goto __pyx_L0;
0262:
0263:
+0264: cdef bint CannotAddrToSizeU(addr_t a) nogil:
static int __pyx_f_10bytesparse_2_c_CannotAddrToSizeU(addr_t __pyx_v_a) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0265: return a > <addr_t>SIZE_MAX
__pyx_r = (__pyx_v_a > ((addr_t)SIZE_MAX)); goto __pyx_L0;
0266:
0267:
+0268: cdef vint CheckAddrToSizeU(addr_t a) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(addr_t __pyx_v_a) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckAddrToSizeU", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckAddrToSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0269: if CannotAddrToSizeU(a):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddrToSizeU(__pyx_v_a) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0270: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 270, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 270, __pyx_L1_error)
0271:
0272:
+0273: cdef size_t AddrToSizeU(addr_t a) except? 0xDEAD:
static size_t __pyx_f_10bytesparse_2_c_AddrToSizeU(addr_t __pyx_v_a) { size_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("AddrToSizeU", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.AddrToSizeU", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0274: CheckAddrToSizeU(a)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_a); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 274, __pyx_L1_error)
+0275: return <size_t>a
__pyx_r = ((size_t)__pyx_v_a); goto __pyx_L0;
0276:
0277:
0278: # ---------------------------------------------------------------------------------------------------------------------
0279:
+0280: cdef bint CannotAddAddrS(saddr_t a, saddr_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotAddAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0281: return ((b > 0 and a > SADDR_MAX - b) or
__pyx_t_2 = ((__pyx_v_b > 0) != 0); if (!__pyx_t_2) { goto __pyx_L4_next_or; } else { } __pyx_t_2 = ((__pyx_v_a > (SADDR_MAX - __pyx_v_b)) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_L4_next_or:;
+0282: (b < 0 and a < SADDR_MIN - b))
__pyx_t_2 = ((__pyx_v_b < 0) != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_a < (SADDR_MIN - __pyx_v_b)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L3_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0;
0283:
0284:
+0285: cdef vint CheckAddAddrS(saddr_t a, saddr_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckAddAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckAddAddrS", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckAddAddrS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0286: if CannotAddAddrS(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddAddrS(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0287: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 287, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 287, __pyx_L1_error)
0288:
0289:
+0290: cdef saddr_t AddAddrS(saddr_t a, saddr_t b) except? 0xDEAD:
static saddr_t __pyx_f_10bytesparse_2_c_AddAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { saddr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("AddAddrS", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.AddAddrS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0291: CheckAddAddrS(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddAddrS(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 291, __pyx_L1_error)
+0292: return a + b
__pyx_r = (__pyx_v_a + __pyx_v_b); goto __pyx_L0;
0293:
0294:
+0295: cdef bint CannotSubAddrS(saddr_t a, saddr_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotSubAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0296: return ((b > 0 and a < SADDR_MIN + b) or
__pyx_t_2 = ((__pyx_v_b > 0) != 0); if (!__pyx_t_2) { goto __pyx_L4_next_or; } else { } __pyx_t_2 = ((__pyx_v_a < (SADDR_MIN + __pyx_v_b)) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_L4_next_or:;
+0297: (b < 0 and a > SADDR_MAX + b))
__pyx_t_2 = ((__pyx_v_b < 0) != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_a > (SADDR_MAX + __pyx_v_b)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L3_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0;
0298:
0299:
+0300: cdef vint CheckSubAddrS(saddr_t a, saddr_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckSubAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckSubAddrS", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckSubAddrS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0301: if CannotSubAddrS(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotSubAddrS(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0302: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 302, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 302, __pyx_L1_error)
0303:
0304:
+0305: cdef saddr_t SubAddrS(saddr_t a, saddr_t b) except? 0xDEAD:
static saddr_t __pyx_f_10bytesparse_2_c_SubAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { saddr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("SubAddrS", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.SubAddrS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0306: CheckSubAddrS(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckSubAddrS(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 306, __pyx_L1_error)
+0307: return a - b
__pyx_r = (__pyx_v_a - __pyx_v_b); goto __pyx_L0;
0308:
0309:
+0310: cdef bint CannotMulAddrS(saddr_t a, saddr_t b) nogil:
static int __pyx_f_10bytesparse_2_c_CannotMulAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0311: with cython.cdivision(True):
+0312: if a > 0:
__pyx_t_1 = ((__pyx_v_a > 0) != 0); if (__pyx_t_1) { /* … */ }
+0313: if b > 0:
__pyx_t_1 = ((__pyx_v_b > 0) != 0); if (__pyx_t_1) { /* … */ }
+0314: return a > (SADDR_MAX // b)
__pyx_r = (__pyx_v_a > (SADDR_MAX / __pyx_v_b)); goto __pyx_L0;
0315: else:
+0316: return b < (SADDR_MIN // a)
/*else*/ { __pyx_r = (__pyx_v_b < (SADDR_MIN / __pyx_v_a)); goto __pyx_L0; }
0317: else:
+0318: if b > 0:
/*else*/ { __pyx_t_1 = ((__pyx_v_b > 0) != 0); if (__pyx_t_1) { /* … */ }
+0319: return a < (SADDR_MIN // b)
__pyx_r = (__pyx_v_a < (SADDR_MIN / __pyx_v_b)); goto __pyx_L0;
0320: else:
+0321: return a and b < (SADDR_MAX // a)
/*else*/ { __pyx_t_2 = (__pyx_v_a != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L6_bool_binop_done; } __pyx_t_2 = ((__pyx_v_b < (SADDR_MAX / __pyx_v_a)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L6_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0; } }
0322:
0323:
+0324: cdef vint CheckMulAddrS(saddr_t a, saddr_t b) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckMulAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckMulAddrS", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckMulAddrS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0325: if CannotMulAddrS(a, b):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotMulAddrS(__pyx_v_a, __pyx_v_b) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0326: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 326, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 326, __pyx_L1_error)
0327:
0328:
+0329: cdef saddr_t MulAddrS(saddr_t a, saddr_t b) except? 0xDEAD:
static saddr_t __pyx_f_10bytesparse_2_c_MulAddrS(saddr_t __pyx_v_a, saddr_t __pyx_v_b) { saddr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("MulAddrS", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.MulAddrS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0330: CheckMulAddrS(a, b)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckMulAddrS(__pyx_v_a, __pyx_v_b); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 330, __pyx_L1_error)
+0331: return a * b
__pyx_r = (__pyx_v_a * __pyx_v_b); goto __pyx_L0;
0332:
0333:
+0334: cdef bint CannotAddrToSizeS(saddr_t a) nogil:
static int __pyx_f_10bytesparse_2_c_CannotAddrToSizeS(saddr_t __pyx_v_a) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0335: return a < <saddr_t>SSIZE_MIN or a > <saddr_t>SSIZE_MAX
__pyx_t_2 = ((__pyx_v_a < ((saddr_t)SSIZE_MIN)) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L3_bool_binop_done; } __pyx_t_2 = ((__pyx_v_a > ((saddr_t)SSIZE_MAX)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L3_bool_binop_done:; __pyx_r = __pyx_t_1; goto __pyx_L0;
0336:
0337:
+0338: cdef vint CheckAddrToSizeS(saddr_t a) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_CheckAddrToSizeS(saddr_t __pyx_v_a) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("CheckAddrToSizeS", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.CheckAddrToSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0339: if CannotAddrToSizeS(a):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddrToSizeS(__pyx_v_a) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0340: raise OverflowError()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 340, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 340, __pyx_L1_error)
0341:
0342:
+0343: cdef ssize_t AddrToSizeS(saddr_t a) except? 0xDEAD:
static Py_ssize_t __pyx_f_10bytesparse_2_c_AddrToSizeS(saddr_t __pyx_v_a) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("AddrToSizeS", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.AddrToSizeS", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0xDEAD; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0344: CheckAddrToSizeS(a)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeS(__pyx_v_a); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 344, __pyx_L1_error)
+0345: return <ssize_t>a
__pyx_r = ((Py_ssize_t)__pyx_v_a); goto __pyx_L0;
0346:
0347:
0348: # =====================================================================================================================
0349:
+0350: cdef Block_* Block_Alloc(addr_t address, size_t size, bint zero) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Alloc(addr_t __pyx_v_address, size_t __pyx_v_size, int __pyx_v_zero) { Block_ *__pyx_v_that; size_t __pyx_v_allocated; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Alloc", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_Alloc", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
0351: cdef:
+0352: Block_* that = NULL
__pyx_v_that = NULL;
0353: size_t allocated
0354:
+0355: if size > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_size > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0356: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 356, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 356, __pyx_L1_error) /* … */ __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_u_size_overflow); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 356, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2);
0357:
0358: # Allocate as per request
+0359: allocated = Upsize(0, size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Upsize(0, __pyx_v_size);
+0360: if allocated > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0361: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 361, __pyx_L1_error)
0362:
+0363: that = <Block_*>PyMem_Calloc(Block_HEADING + (allocated * sizeof(byte_t)), 1, zero)
__pyx_v_that = ((Block_ *)__pyx_f_10bytesparse_2_c_PyMem_Calloc((Block_HEADING + (__pyx_v_allocated * (sizeof(byte_t)))), 1, __pyx_v_zero));
+0364: if that == NULL:
__pyx_t_1 = ((__pyx_v_that == NULL) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0365: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 365, __pyx_L1_error)
0366:
+0367: that.address = address
__pyx_v_that->address = __pyx_v_address;
+0368: that.references = 1 # acquired by default
__pyx_v_that->references = 1;
+0369: that.allocated = allocated
__pyx_v_that->allocated = __pyx_v_allocated;
+0370: that.start = MARGIN # leave some initial room
__pyx_v_that->start = MARGIN;
+0371: that.endex = that.start + size
__pyx_v_that->endex = (__pyx_v_that->start + __pyx_v_size);
+0372: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
0373:
0374:
+0375: cdef Block_* Block_Free(Block_* that):
static Block_ *__pyx_f_10bytesparse_2_c_Block_Free(Block_ *__pyx_v_that) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Free", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0376: if that:
__pyx_t_1 = (__pyx_v_that != 0); if (__pyx_t_1) { /* … */ }
+0377: PyMem_Free(that)
PyMem_Free(__pyx_v_that);
+0378: return NULL
__pyx_r = NULL; goto __pyx_L0;
0379:
0380:
+0381: cdef Block_* Block_Create(addr_t address, size_t size, const byte_t* buffer) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Create(addr_t __pyx_v_address, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Block_ *__pyx_v_that; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Create", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Block_Create", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0382: if not size or buffer:
__pyx_t_2 = ((!(__pyx_v_size != 0)) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L4_bool_binop_done; } __pyx_t_2 = (__pyx_v_buffer != 0); __pyx_t_1 = __pyx_t_2; __pyx_L4_bool_binop_done:; if (likely(__pyx_t_1)) { /* … */ }
+0383: that = Block_Alloc(address, size, False)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Alloc(__pyx_v_address, __pyx_v_size, 0); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 383, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
+0384: memcpy(&that.data[that.start], buffer, size * sizeof(byte_t))
(void)(memcpy((&(__pyx_v_that->data[__pyx_v_that->start])), __pyx_v_buffer, (__pyx_v_size * (sizeof(byte_t)))));
+0385: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
0386: else:
+0387: raise ValueError('null pointer')
/*else*/ { __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 387, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 387, __pyx_L1_error) } /* … */ __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_u_null_pointer); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 387, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3);
0388:
0389:
+0390: cdef Block_* Block_Copy(const Block_* that) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Copy(Block_ const *__pyx_v_that) { Block_ *__pyx_v_ptr; size_t __pyx_v_size; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Copy", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_Copy", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
0391: cdef:
0392: Block_* ptr
0393: size_t size
0394:
+0395: if that:
__pyx_t_1 = (__pyx_v_that != 0); if (likely(__pyx_t_1)) { /* … */ }
+0396: size = Block_HEADING + (that.allocated * sizeof(byte_t))
__pyx_v_size = (Block_HEADING + (__pyx_v_that->allocated * (sizeof(byte_t))));
+0397: ptr = <Block_*>PyMem_Malloc(size)
__pyx_v_ptr = ((Block_ *)PyMem_Malloc(__pyx_v_size));
+0398: if ptr == NULL:
__pyx_t_1 = ((__pyx_v_ptr == NULL) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0399: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 399, __pyx_L1_error)
0400:
+0401: memcpy(ptr, that, size)
(void)(memcpy(__pyx_v_ptr, __pyx_v_that, __pyx_v_size));
+0402: ptr.references = 1 # acquired by default
__pyx_v_ptr->references = 1;
+0403: return ptr
__pyx_r = __pyx_v_ptr; goto __pyx_L0;
0404: else:
+0405: raise ValueError('null pointer')
/*else*/ { __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 405, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 405, __pyx_L1_error) }
0406:
0407:
+0408: cdef Block_* Block_FromObject(addr_t address, object obj, bint nonnull) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_FromObject(addr_t __pyx_v_address, PyObject *__pyx_v_obj, int __pyx_v_nonnull) { byte_t __pyx_v_value; __Pyx_memviewslice __pyx_v_view = { 0, 0, { 0 }, { 0 }, { 0 } }; size_t __pyx_v_size; byte_t const *__pyx_v_ptr; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_FromObject", 0); /* … */ /* function exit code */ __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_8, 1); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_13); __Pyx_AddTraceback("bytesparse._c.Block_FromObject", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __Pyx_RefNannyFinishContext(); return __pyx_r; }
0409: cdef:
0410: byte_t value
0411: const byte_t[:] view
0412: size_t size
0413: const byte_t* ptr
0414:
+0415: if isinstance(obj, int):
__pyx_t_1 = PyInt_Check(__pyx_v_obj);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
}
+0416: value = <byte_t>obj
__pyx_t_3 = __Pyx_PyInt_As_byte_t(__pyx_v_obj); if (unlikely((__pyx_t_3 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 416, __pyx_L1_error) __pyx_v_value = ((byte_t)__pyx_t_3);
+0417: return Block_Create(address, 1, &value)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Create(__pyx_v_address, 1, (&__pyx_v_value)); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 417, __pyx_L1_error)
__pyx_r = __pyx_t_4;
goto __pyx_L0;
0418: else:
+0419: try:
/*else*/ { { /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; goto __pyx_L9_try_end; __pyx_L4_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_8, 1); /* … */ __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); goto __pyx_L1_error; __pyx_L5_exception_handled:; __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); __pyx_L9_try_end:; }
+0420: view = obj
__pyx_t_8 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_obj, 0); if (unlikely(!__pyx_t_8.memview)) __PYX_ERR(0, 420, __pyx_L4_error) __pyx_v_view = __pyx_t_8; __pyx_t_8.memview = NULL; __pyx_t_8.data = NULL;
+0421: except TypeError:
__pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_TypeError); if (__pyx_t_9) { __Pyx_AddTraceback("bytesparse._c.Block_FromObject", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0) __PYX_ERR(0, 421, __pyx_L6_except_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_11); __Pyx_GOTREF(__pyx_t_12);
+0422: view = bytes(obj)
__pyx_t_13 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_obj); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 422, __pyx_L6_except_error) __Pyx_GOTREF(__pyx_t_13); __pyx_t_8 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_t_13, 0); if (unlikely(!__pyx_t_8.memview)) __PYX_ERR(0, 422, __pyx_L6_except_error) __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __pyx_v_view = __pyx_t_8; __pyx_t_8.memview = NULL; __pyx_t_8.data = NULL; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; goto __pyx_L5_exception_handled; } goto __pyx_L6_except_error; __pyx_L6_except_error:;
+0423: size = len(view)
__pyx_t_14 = __Pyx_MemoryView_Len(__pyx_v_view);
__pyx_v_size = __pyx_t_14;
+0424: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
0425: with cython.boundscheck(False):
+0426: ptr = &view[0]
__pyx_t_15 = 0; if (__pyx_t_15 < 0) __pyx_t_15 += __pyx_v_view.shape[0]; __pyx_v_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_view.data + __pyx_t_15 * __pyx_v_view.strides[0]) ))));
+0427: return Block_Create(address, size, ptr)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Create(__pyx_v_address, __pyx_v_size, __pyx_v_ptr); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 427, __pyx_L1_error)
__pyx_r = __pyx_t_4;
goto __pyx_L0;
0428: else:
+0429: if nonnull:
/*else*/ { __pyx_t_2 = (__pyx_v_nonnull != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0430: raise ValueError('invalid block data size')
__pyx_t_12 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 430, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_Raise(__pyx_t_12, 0, 0, 0); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; __PYX_ERR(0, 430, __pyx_L1_error) /* … */ __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_invalid_block_data_size); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 430, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4);
0431: else:
+0432: return Block_Alloc(address, 0, False)
/*else*/ {
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Alloc(__pyx_v_address, 0, 0); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 432, __pyx_L1_error)
__pyx_r = __pyx_t_4;
goto __pyx_L0;
}
}
}
0433:
0434:
+0435: cdef Block_* Block_Acquire(Block_* that) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Acquire(Block_ *__pyx_v_that) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Acquire", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_Acquire", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0436: if that:
__pyx_t_1 = (__pyx_v_that != 0); if (likely(__pyx_t_1)) { /* … */ }
+0437: if that.references < SIZE_MAX:
__pyx_t_1 = ((__pyx_v_that->references < SIZE_MAX) != 0); if (likely(__pyx_t_1)) { /* … */ }
+0438: that.references += 1
__pyx_v_that->references = (__pyx_v_that->references + 1);
+0439: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
0440: else:
+0441: raise OverflowError()
/*else*/ { __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 441, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 441, __pyx_L1_error) }
0442: else:
+0443: raise RuntimeError('null pointer')
/*else*/ { __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 443, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 443, __pyx_L1_error) }
0444:
0445:
+0446: cdef Block_* Block_Release_(Block_* that):
static Block_ *__pyx_f_10bytesparse_2_c_Block_Release_(Block_ *__pyx_v_that) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Release_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0447: if that:
__pyx_t_1 = (__pyx_v_that != 0); if (__pyx_t_1) { /* … */ }
+0448: if that.references:
__pyx_t_1 = (__pyx_v_that->references != 0); if (__pyx_t_1) { /* … */ }
+0449: that.references -= 1
__pyx_v_that->references = (__pyx_v_that->references - 1);
0450:
+0451: if that.references:
__pyx_t_1 = (__pyx_v_that->references != 0); if (__pyx_t_1) { /* … */ }
+0452: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
0453: else:
+0454: PyMem_Free(that)
/*else*/ {
PyMem_Free(__pyx_v_that);
}
0455:
+0456: return NULL
__pyx_r = NULL; goto __pyx_L0;
0457:
0458:
+0459: cdef Block_* Block_Release(Block_* that):
static Block_ *__pyx_f_10bytesparse_2_c_Block_Release(Block_ *__pyx_v_that) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Release", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0460: if that:
__pyx_t_1 = (__pyx_v_that != 0); if (__pyx_t_1) { /* … */ }
+0461: if that.references:
__pyx_t_1 = (__pyx_v_that->references != 0); if (__pyx_t_1) { /* … */ }
+0462: that.references -= 1
__pyx_v_that->references = (__pyx_v_that->references - 1);
0463:
+0464: if not that.references:
__pyx_t_1 = ((!(__pyx_v_that->references != 0)) != 0); if (__pyx_t_1) { /* … */ }
+0465: PyMem_Free(that)
PyMem_Free(__pyx_v_that);
0466:
+0467: return NULL
__pyx_r = NULL; goto __pyx_L0;
0468:
0469:
+0470: cdef size_t Block_Length(const Block_* that) nogil:
static size_t __pyx_f_10bytesparse_2_c_Block_Length(Block_ const *__pyx_v_that) { size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0471: return that.endex - that.start
__pyx_r = (__pyx_v_that->endex - __pyx_v_that->start); goto __pyx_L0;
0472:
0473:
+0474: cdef addr_t Block_Start(const Block_* that) nogil:
static addr_t __pyx_f_10bytesparse_2_c_Block_Start(Block_ const *__pyx_v_that) { addr_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0475: return that.address
__pyx_r = __pyx_v_that->address; goto __pyx_L0;
0476:
0477:
+0478: cdef addr_t Block_Endex(const Block_* that) nogil:
static addr_t __pyx_f_10bytesparse_2_c_Block_Endex(Block_ const *__pyx_v_that) { addr_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0479: return that.address + (that.endex - that.start)
__pyx_r = (__pyx_v_that->address + (__pyx_v_that->endex - __pyx_v_that->start)); goto __pyx_L0;
0480:
0481:
+0482: cdef addr_t Block_Endin(const Block_* that) nogil:
static addr_t __pyx_f_10bytesparse_2_c_Block_Endin(Block_ const *__pyx_v_that) { addr_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0483: return that.address + (that.endex - that.start) - 1
__pyx_r = ((__pyx_v_that->address + (__pyx_v_that->endex - __pyx_v_that->start)) - 1); goto __pyx_L0;
0484:
0485:
+0486: cdef addr_t Block_BoundAddress(const Block_* that, addr_t address) nogil:
static addr_t __pyx_f_10bytesparse_2_c_Block_BoundAddress(Block_ const *__pyx_v_that, addr_t __pyx_v_address) { addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; addr_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0487: cdef:
+0488: addr_t block_start = that.address
__pyx_t_1 = __pyx_v_that->address; __pyx_v_block_start = __pyx_t_1;
+0489: addr_t block_endex = block_start + that.endex - that.start
__pyx_v_block_endex = ((__pyx_v_block_start + __pyx_v_that->endex) - __pyx_v_that->start);
0490:
+0491: if address < block_start:
__pyx_t_2 = ((__pyx_v_address < __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+0492: address = block_start # trim to start
__pyx_v_address = __pyx_v_block_start;
+0493: elif address > block_endex:
__pyx_t_2 = ((__pyx_v_address > __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } __pyx_L3:;
+0494: address = block_endex # trim to end
__pyx_v_address = __pyx_v_block_endex;
+0495: return address
__pyx_r = __pyx_v_address; goto __pyx_L0;
0496:
0497:
+0498: cdef size_t Block_BoundAddressToOffset(const Block_* that, addr_t address) nogil:
static size_t __pyx_f_10bytesparse_2_c_Block_BoundAddressToOffset(Block_ const *__pyx_v_that, addr_t __pyx_v_address) { addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0499: cdef:
+0500: addr_t block_start = that.address
__pyx_t_1 = __pyx_v_that->address; __pyx_v_block_start = __pyx_t_1;
+0501: addr_t block_endex = block_start + that.endex - that.start
__pyx_v_block_endex = ((__pyx_v_block_start + __pyx_v_that->endex) - __pyx_v_that->start);
0502:
+0503: if address < block_start:
__pyx_t_2 = ((__pyx_v_address < __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+0504: address = block_start # trim to start
__pyx_v_address = __pyx_v_block_start;
+0505: elif address > block_endex:
__pyx_t_2 = ((__pyx_v_address > __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } __pyx_L3:;
+0506: address = block_endex # trim to end
__pyx_v_address = __pyx_v_block_endex;
+0507: return <size_t>(address - block_start)
__pyx_r = ((size_t)(__pyx_v_address - __pyx_v_block_start)); goto __pyx_L0;
0508:
0509:
+0510: cdef size_t Block_BoundOffset(const Block_* that, size_t offset) nogil:
static size_t __pyx_f_10bytesparse_2_c_Block_BoundOffset(Block_ const *__pyx_v_that, size_t __pyx_v_offset) { size_t __pyx_v_size; size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0511: cdef:
+0512: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
0513:
+0514: if offset > size:
__pyx_t_1 = ((__pyx_v_offset > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0515: offset = size # trim to end
__pyx_v_offset = __pyx_v_size;
+0516: return offset
__pyx_r = __pyx_v_offset; goto __pyx_L0;
0517:
0518:
+0519: cdef (addr_t, addr_t) Block_BoundAddressSlice(const Block_* that, addr_t start, addr_t endex) nogil:
static __pyx_ctuple_addr_t__and_addr_t __pyx_f_10bytesparse_2_c_Block_BoundAddressSlice(Block_ const *__pyx_v_that, addr_t __pyx_v_start, addr_t __pyx_v_endex) { addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; __pyx_ctuple_addr_t__and_addr_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0520: cdef:
+0521: addr_t block_start = that.address
__pyx_t_1 = __pyx_v_that->address; __pyx_v_block_start = __pyx_t_1;
+0522: addr_t block_endex = block_start + (that.endex - that.start)
__pyx_v_block_endex = (__pyx_v_block_start + (__pyx_v_that->endex - __pyx_v_that->start));
0523:
+0524: if start < block_start:
__pyx_t_2 = ((__pyx_v_start < __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+0525: start = block_start # trim to start
__pyx_v_start = __pyx_v_block_start;
+0526: elif start > block_endex:
__pyx_t_2 = ((__pyx_v_start > __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } __pyx_L3:;
+0527: start = block_endex # trim to end
__pyx_v_start = __pyx_v_block_endex;
0528:
+0529: if endex < block_start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+0530: endex = block_start # trim to start
__pyx_v_endex = __pyx_v_block_start;
+0531: elif endex > block_endex:
__pyx_t_2 = ((__pyx_v_endex > __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } __pyx_L4:;
+0532: endex = block_endex # trim to end
__pyx_v_endex = __pyx_v_block_endex;
0533:
+0534: if endex < start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_2) { /* … */ }
+0535: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0536:
+0537: return start, endex
__pyx_t_3.f0 = __pyx_v_start; __pyx_t_3.f1 = __pyx_v_endex; __pyx_r = __pyx_t_3; goto __pyx_L0;
0538:
0539:
+0540: cdef (size_t, size_t) Block_BoundAddressSliceToOffset(const Block_* that, addr_t start, addr_t endex) nogil:
static __pyx_ctuple_size_t__and_size_t __pyx_f_10bytesparse_2_c_Block_BoundAddressSliceToOffset(Block_ const *__pyx_v_that, addr_t __pyx_v_start, addr_t __pyx_v_endex) { addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; __pyx_ctuple_size_t__and_size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0541: cdef:
+0542: addr_t block_start = that.address
__pyx_t_1 = __pyx_v_that->address; __pyx_v_block_start = __pyx_t_1;
+0543: addr_t block_endex = block_start + (that.endex - that.start)
__pyx_v_block_endex = (__pyx_v_block_start + (__pyx_v_that->endex - __pyx_v_that->start));
0544:
+0545: if start < block_start:
__pyx_t_2 = ((__pyx_v_start < __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+0546: start = block_start # trim to start
__pyx_v_start = __pyx_v_block_start;
+0547: elif start > block_endex:
__pyx_t_2 = ((__pyx_v_start > __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } __pyx_L3:;
+0548: start = block_endex # trim to end
__pyx_v_start = __pyx_v_block_endex;
0549:
+0550: if endex < block_start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+0551: endex = block_start # trim to start
__pyx_v_endex = __pyx_v_block_start;
+0552: elif endex > block_endex:
__pyx_t_2 = ((__pyx_v_endex > __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } __pyx_L4:;
+0553: endex = block_endex # trim to end
__pyx_v_endex = __pyx_v_block_endex;
0554:
+0555: if endex < start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_2) { /* … */ }
+0556: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0557:
+0558: return <size_t>(start - block_start), <size_t>(endex - block_start)
__pyx_t_3.f0 = ((size_t)(__pyx_v_start - __pyx_v_block_start)); __pyx_t_3.f1 = ((size_t)(__pyx_v_endex - __pyx_v_block_start)); __pyx_r = __pyx_t_3; goto __pyx_L0;
0559:
0560:
+0561: cdef (size_t, size_t) Block_BoundOffsetSlice(const Block_* that, size_t start, size_t endex) nogil:
static __pyx_ctuple_size_t__and_size_t __pyx_f_10bytesparse_2_c_Block_BoundOffsetSlice(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex) { size_t __pyx_v_size; __pyx_ctuple_size_t__and_size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0562: cdef:
+0563: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
0564:
+0565: if start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0566: start = size # trim to end
__pyx_v_start = __pyx_v_size;
0567:
+0568: if endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0569: endex = size # trim to end
__pyx_v_endex = __pyx_v_size;
0570:
+0571: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+0572: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0573:
+0574: return start, endex
__pyx_t_2.f0 = __pyx_v_start; __pyx_t_2.f1 = __pyx_v_endex; __pyx_r = __pyx_t_2; goto __pyx_L0;
0575:
0576:
+0577: cdef vint Block_CheckMutable(const Block_* that) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Block_CheckMutable(Block_ const *__pyx_v_that) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_CheckMutable", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_CheckMutable", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+0578: if that.references > 1:
__pyx_t_1 = ((__pyx_v_that->references > 1) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+0579: raise RuntimeError('Existing exports of data: object cannot be re-sized')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 579, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 579, __pyx_L1_error) /* … */ __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_Existing_exports_of_data_object); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 579, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5);
0580:
0581:
+0582: cdef bint Block_Eq_(const Block_* that, size_t size, const byte_t* buffer) nogil:
static int __pyx_f_10bytesparse_2_c_Block_Eq_(Block_ const *__pyx_v_that, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+0583: if size != that.endex - that.start:
__pyx_t_1 = ((__pyx_v_size != (__pyx_v_that->endex - __pyx_v_that->start)) != 0); if (__pyx_t_1) { /* … */ }
+0584: return False
__pyx_r = 0; goto __pyx_L0;
0585:
+0586: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+0587: if memcmp(&that.data[that.start], buffer, size):
__pyx_t_1 = (memcmp((&(__pyx_v_that->data[__pyx_v_that->start])), __pyx_v_buffer, __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0588: return False
__pyx_r = 0; goto __pyx_L0;
0589:
+0590: return True
__pyx_r = 1; goto __pyx_L0;
0591:
0592:
+0593: cdef bint Block_Eq(const Block_* that, const Block_* other) nogil:
static int __pyx_f_10bytesparse_2_c_Block_Eq(Block_ const *__pyx_v_that, Block_ const *__pyx_v_other) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0594: # if that.address != other.address:
0595: # return False
0596:
+0597: return Block_Eq_(that, other.endex - other.start, &other.data[other.start])
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Eq_(__pyx_v_that, (__pyx_v_other->endex - __pyx_v_other->start), (&(__pyx_v_other->data[__pyx_v_other->start]))); goto __pyx_L0;
0598:
0599:
+0600: cdef int Block_Cmp_(const Block_* that, size_t size, const byte_t* buffer) nogil:
static int __pyx_f_10bytesparse_2_c_Block_Cmp_(Block_ const *__pyx_v_that, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { size_t __pyx_v_size2; size_t __pyx_v_minsize; byte_t const *__pyx_v_buffer2; int __pyx_v_sign; int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0601: cdef:
+0602: size_t size2 = that.endex - that.start
__pyx_v_size2 = (__pyx_v_that->endex - __pyx_v_that->start);
+0603: size_t minsize = size2 if size2 < size else size
if (((__pyx_v_size2 < __pyx_v_size) != 0)) { __pyx_t_1 = __pyx_v_size2; } else { __pyx_t_1 = __pyx_v_size; } __pyx_v_minsize = __pyx_t_1;
+0604: const byte_t* buffer2 = &that.data[that.start]
__pyx_v_buffer2 = (&(__pyx_v_that->data[__pyx_v_that->start]));
+0605: int sign = memcmp(buffer2, buffer, minsize)
__pyx_v_sign = memcmp(__pyx_v_buffer2, __pyx_v_buffer, __pyx_v_minsize);
0606:
+0607: if size2 == size:
__pyx_t_2 = ((__pyx_v_size2 == __pyx_v_size) != 0); if (__pyx_t_2) { /* … */ }
+0608: return sign
__pyx_r = __pyx_v_sign; goto __pyx_L0;
+0609: elif sign:
__pyx_t_2 = (__pyx_v_sign != 0); if (__pyx_t_2) { /* … */ }
+0610: return sign
__pyx_r = __pyx_v_sign; goto __pyx_L0;
0611: else:
+0612: return -1 if size2 < size else +1
/*else*/ { if (((__pyx_v_size2 < __pyx_v_size) != 0)) { __pyx_t_3 = -1; } else { __pyx_t_3 = 1; } __pyx_r = __pyx_t_3; goto __pyx_L0; }
0613:
0614:
+0615: cdef int Block_Cmp(const Block_* that, const Block_* other) nogil:
static int __pyx_f_10bytesparse_2_c_Block_Cmp(Block_ const *__pyx_v_that, Block_ const *__pyx_v_other) { int __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0616: # if that.address != other.address:
0617: # return -1 if that.address < other.address else +1
0618:
+0619: return Block_Cmp_(that, other.endex - other.start, &other.data[other.start])
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Cmp_(__pyx_v_that, (__pyx_v_other->endex - __pyx_v_other->start), (&(__pyx_v_other->data[__pyx_v_other->start]))); goto __pyx_L0;
0620:
0621:
+0622: cdef ssize_t Block_Find__(const Block_* that, size_t start, size_t endex, byte_t value) nogil:
static Py_ssize_t __pyx_f_10bytesparse_2_c_Block_Find__(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, byte_t __pyx_v_value) { size_t __pyx_v_size; byte_t const *__pyx_v_ptr; byte_t const *__pyx_v_end; Py_ssize_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0623: cdef:
+0624: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
0625: const byte_t* ptr
0626: const byte_t* end
0627:
+0628: if start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0629: start = size # trim to end
__pyx_v_start = __pyx_v_size;
+0630: if endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0631: endex = size # trim to end
__pyx_v_endex = __pyx_v_size;
+0632: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+0633: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0634:
+0635: ptr = &that.data[that.start + start]
__pyx_v_ptr = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_start)]));
+0636: end = &that.data[that.start + endex]
__pyx_v_end = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_endex)]));
0637:
+0638: while ptr != end:
while (1) { __pyx_t_1 = ((__pyx_v_ptr != __pyx_v_end) != 0); if (!__pyx_t_1) break;
+0639: if ptr[0] == value:
__pyx_t_1 = (((__pyx_v_ptr[0]) == __pyx_v_value) != 0); if (__pyx_t_1) { /* … */ }
+0640: return <ssize_t>(<ptrdiff_t>ptr - <ptrdiff_t>&that.data[that.start])
__pyx_r = ((Py_ssize_t)(((ptrdiff_t)__pyx_v_ptr) - ((ptrdiff_t)(&(__pyx_v_that->data[__pyx_v_that->start]))))); goto __pyx_L0;
+0641: ptr += 1
__pyx_v_ptr = (__pyx_v_ptr + 1); }
+0642: return -1
__pyx_r = -1L; goto __pyx_L0;
0643:
0644:
+0645: cdef ssize_t Block_Find_(const Block_* that, size_t start, size_t endex,
static Py_ssize_t __pyx_f_10bytesparse_2_c_Block_Find_(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { size_t __pyx_v_size2; byte_t const *__pyx_v_ptr; byte_t const *__pyx_v_end; Py_ssize_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0646: size_t size, const byte_t* buffer) nogil:
0647: cdef:
0648: size_t size2
0649: const byte_t* ptr
0650: const byte_t* end
0651:
+0652: if size == 1: # faster code for single byte
__pyx_t_1 = ((__pyx_v_size == 1) != 0); if (__pyx_t_1) { /* … */ }
+0653: return Block_Find__(that, start, endex, buffer[0])
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Find__(__pyx_v_that, __pyx_v_start, __pyx_v_endex, (__pyx_v_buffer[0])); goto __pyx_L0;
0654:
+0655: elif size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+0656: size2 = that.endex - that.start
__pyx_v_size2 = (__pyx_v_that->endex - __pyx_v_that->start);
0657:
+0658: if start > size2:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0659: start = size2 # trim to end
__pyx_v_start = __pyx_v_size2;
+0660: if endex > size2:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0661: endex = size2 # trim to end
__pyx_v_endex = __pyx_v_size2;
+0662: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+0663: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0664:
+0665: if size <= size2 and size <= endex - start:
__pyx_t_2 = ((__pyx_v_size <= __pyx_v_size2) != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L8_bool_binop_done; } __pyx_t_2 = ((__pyx_v_size <= (__pyx_v_endex - __pyx_v_start)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L8_bool_binop_done:; if (__pyx_t_1) { /* … */ }
+0666: size2 = endex - size + 1
__pyx_v_size2 = ((__pyx_v_endex - __pyx_v_size) + 1);
0667:
+0668: if start > size2:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0669: start = size2 # trim to end
__pyx_v_start = __pyx_v_size2;
+0670: if endex > size2:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0671: endex = size2 # trim to end
__pyx_v_endex = __pyx_v_size2;
0672:
+0673: ptr = &that.data[that.start + start]
__pyx_v_ptr = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_start)]));
+0674: end = &that.data[that.start + endex]
__pyx_v_end = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_endex)]));
0675:
+0676: while ptr != end:
while (1) { __pyx_t_1 = ((__pyx_v_ptr != __pyx_v_end) != 0); if (!__pyx_t_1) break;
+0677: if ptr[0] == buffer[0]: # faster pruning
__pyx_t_1 = (((__pyx_v_ptr[0]) == (__pyx_v_buffer[0])) != 0); if (__pyx_t_1) { /* … */ }
+0678: if not memcmp(ptr, buffer, size):
__pyx_t_1 = ((!(memcmp(__pyx_v_ptr, __pyx_v_buffer, __pyx_v_size) != 0)) != 0); if (__pyx_t_1) { /* … */ }
+0679: return <ssize_t>(<ptrdiff_t>ptr - <ptrdiff_t>&that.data[that.start])
__pyx_r = ((Py_ssize_t)(((ptrdiff_t)__pyx_v_ptr) - ((ptrdiff_t)(&(__pyx_v_that->data[__pyx_v_that->start]))))); goto __pyx_L0;
+0680: ptr += 1
__pyx_v_ptr = (__pyx_v_ptr + 1); }
+0681: return -1
__pyx_r = -1L; goto __pyx_L0;
0682:
0683:
+0684: cdef ssize_t Block_Find(const Block_* that, ssize_t start, ssize_t endex,
static Py_ssize_t __pyx_f_10bytesparse_2_c_Block_Find(Block_ const *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Py_ssize_t __pyx_v_ssize; Py_ssize_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0685: size_t size, const byte_t* buffer) nogil:
0686: cdef:
+0687: ssize_t ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
0688:
+0689: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+0690: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+0691: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+0692: start = 0 # trim to start
__pyx_v_start = 0;
0693:
+0694: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+0695: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+0696: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+0697: endex = 0 # trim to start
__pyx_v_endex = 0;
0698:
+0699: return Block_Find_(that, <size_t>start, <size_t>endex, size, buffer)
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Find_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), __pyx_v_size, __pyx_v_buffer); goto __pyx_L0;
0700:
0701:
+0702: cdef ssize_t Block_ReverseFind__(const Block_* that, size_t start, size_t endex, byte_t value) nogil:
static Py_ssize_t __pyx_f_10bytesparse_2_c_Block_ReverseFind__(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, byte_t __pyx_v_value) { size_t __pyx_v_size; byte_t const *__pyx_v_ptr; byte_t const *__pyx_v_end; Py_ssize_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0703: cdef:
+0704: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
0705: const byte_t* ptr
0706: const byte_t* end
0707:
+0708: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+0709: if start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0710: start = size # trim to end
__pyx_v_start = __pyx_v_size;
+0711: if endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0712: endex = size # trim to end
__pyx_v_endex = __pyx_v_size;
+0713: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+0714: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0715:
+0716: end = &that.data[that.start + start]
__pyx_v_end = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_start)]));
+0717: ptr = &that.data[that.start + endex]
__pyx_v_ptr = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_endex)]));
0718:
+0719: while ptr != end:
while (1) { __pyx_t_1 = ((__pyx_v_ptr != __pyx_v_end) != 0); if (!__pyx_t_1) break;
+0720: ptr -= 1
__pyx_v_ptr = (__pyx_v_ptr - 1);
+0721: if ptr[0] == value:
__pyx_t_1 = (((__pyx_v_ptr[0]) == __pyx_v_value) != 0); if (__pyx_t_1) { /* … */ } }
+0722: return <ssize_t>(<ptrdiff_t>ptr - <ptrdiff_t>&that.data[that.start])
__pyx_r = ((Py_ssize_t)(((ptrdiff_t)__pyx_v_ptr) - ((ptrdiff_t)(&(__pyx_v_that->data[__pyx_v_that->start]))))); goto __pyx_L0;
+0723: return -1
__pyx_r = -1L; goto __pyx_L0;
0724:
0725:
+0726: cdef ssize_t Block_ReverseFind_(const Block_* that, size_t start, size_t endex,
static Py_ssize_t __pyx_f_10bytesparse_2_c_Block_ReverseFind_(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { size_t __pyx_v_size2; byte_t const *__pyx_v_ptr; byte_t const *__pyx_v_end; Py_ssize_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0727: size_t size, const byte_t* buffer) nogil:
0728: cdef:
0729: size_t size2
0730: const byte_t* ptr
0731: const byte_t* end
0732:
+0733: if size == 1: # faster code for single byte
__pyx_t_1 = ((__pyx_v_size == 1) != 0); if (__pyx_t_1) { /* … */ }
+0734: return Block_ReverseFind__(that, start, endex, buffer[0])
__pyx_r = __pyx_f_10bytesparse_2_c_Block_ReverseFind__(__pyx_v_that, __pyx_v_start, __pyx_v_endex, (__pyx_v_buffer[0])); goto __pyx_L0;
0735:
+0736: elif size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+0737: size2 = that.endex - that.start
__pyx_v_size2 = (__pyx_v_that->endex - __pyx_v_that->start);
0738:
+0739: if start > size2:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0740: start = size2 # trim to end
__pyx_v_start = __pyx_v_size2;
+0741: if endex > size2:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0742: endex = size2 # trim to end
__pyx_v_endex = __pyx_v_size2;
+0743: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+0744: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0745:
+0746: if size <= size2 and size <= endex - start:
__pyx_t_2 = ((__pyx_v_size <= __pyx_v_size2) != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L8_bool_binop_done; } __pyx_t_2 = ((__pyx_v_size <= (__pyx_v_endex - __pyx_v_start)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L8_bool_binop_done:; if (__pyx_t_1) { /* … */ }
+0747: size2 = endex - size + 1
__pyx_v_size2 = ((__pyx_v_endex - __pyx_v_size) + 1);
0748:
+0749: if start > size2:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0750: start = size2 # trim to end
__pyx_v_start = __pyx_v_size2;
+0751: if endex > size2:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0752: endex = size2 # trim to end
__pyx_v_endex = __pyx_v_size2;
0753:
+0754: end = &that.data[that.start + start]
__pyx_v_end = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_start)]));
+0755: ptr = &that.data[that.start + endex]
__pyx_v_ptr = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_endex)]));
0756:
+0757: while ptr != end:
while (1) { __pyx_t_1 = ((__pyx_v_ptr != __pyx_v_end) != 0); if (!__pyx_t_1) break;
+0758: ptr -= 1
__pyx_v_ptr = (__pyx_v_ptr - 1);
+0759: if ptr[0] == buffer[0]: # faster pruning
__pyx_t_1 = (((__pyx_v_ptr[0]) == (__pyx_v_buffer[0])) != 0); if (__pyx_t_1) { /* … */ } }
+0760: if not memcmp(ptr, buffer, size):
__pyx_t_1 = ((!(memcmp(__pyx_v_ptr, __pyx_v_buffer, __pyx_v_size) != 0)) != 0); if (__pyx_t_1) { /* … */ }
+0761: return <ssize_t>(<ptrdiff_t>ptr - <ptrdiff_t>&that.data[that.start])
__pyx_r = ((Py_ssize_t)(((ptrdiff_t)__pyx_v_ptr) - ((ptrdiff_t)(&(__pyx_v_that->data[__pyx_v_that->start]))))); goto __pyx_L0;
+0762: return -1
__pyx_r = -1L; goto __pyx_L0;
0763:
0764:
+0765: cdef ssize_t Block_ReverseFind(const Block_* that, ssize_t start, ssize_t endex,
static Py_ssize_t __pyx_f_10bytesparse_2_c_Block_ReverseFind(Block_ const *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Py_ssize_t __pyx_v_ssize; Py_ssize_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0766: size_t size, const byte_t* buffer) nogil:
0767: cdef:
+0768: ssize_t ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
0769:
+0770: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+0771: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+0772: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+0773: start = 0 # trim to start
__pyx_v_start = 0;
0774:
+0775: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+0776: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+0777: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+0778: endex = 0 # trim to start
__pyx_v_endex = 0;
0779:
+0780: return Block_ReverseFind_(that, <size_t>start, <size_t>endex, size, buffer)
__pyx_r = __pyx_f_10bytesparse_2_c_Block_ReverseFind_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), __pyx_v_size, __pyx_v_buffer); goto __pyx_L0;
0781:
0782:
+0783: cdef size_t Block_Count__(const Block_* that, size_t start, size_t endex, byte_t value) nogil:
static size_t __pyx_f_10bytesparse_2_c_Block_Count__(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, byte_t __pyx_v_value) { size_t __pyx_v_count; size_t __pyx_v_size; byte_t const *__pyx_v_ptr; byte_t const *__pyx_v_end; size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0784: cdef:
+0785: size_t count = 0
__pyx_v_count = 0;
+0786: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
0787: const byte_t* ptr
0788: const byte_t* end
0789:
+0790: if start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0791: start = size # trim to end
__pyx_v_start = __pyx_v_size;
+0792: if endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+0793: endex = size # trim to end
__pyx_v_endex = __pyx_v_size;
+0794: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+0795: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0796:
+0797: ptr = &that.data[that.start + start]
__pyx_v_ptr = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_start)]));
+0798: end = &that.data[that.start + endex]
__pyx_v_end = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_endex)]));
0799:
+0800: while ptr != end:
while (1) { __pyx_t_1 = ((__pyx_v_ptr != __pyx_v_end) != 0); if (!__pyx_t_1) break;
+0801: if ptr[0] == value:
__pyx_t_1 = (((__pyx_v_ptr[0]) == __pyx_v_value) != 0); if (__pyx_t_1) { /* … */ }
+0802: count += 1
__pyx_v_count = (__pyx_v_count + 1);
+0803: ptr += 1
__pyx_v_ptr = (__pyx_v_ptr + 1); }
+0804: return count
__pyx_r = __pyx_v_count; goto __pyx_L0;
0805:
0806:
+0807: cdef size_t Block_Count_(const Block_* that, size_t start, size_t endex,
static size_t __pyx_f_10bytesparse_2_c_Block_Count_(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { size_t __pyx_v_count; size_t __pyx_v_size2; byte_t const *__pyx_v_ptr; byte_t const *__pyx_v_end; size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0808: size_t size, const byte_t* buffer) nogil:
0809: cdef:
+0810: size_t count = 0
__pyx_v_count = 0;
0811: size_t size2
0812: const byte_t* ptr
0813: const byte_t* end
0814:
+0815: if size == 1: # faster code for single byte
__pyx_t_1 = ((__pyx_v_size == 1) != 0); if (__pyx_t_1) { /* … */ }
+0816: return Block_Count__(that, start, endex, buffer[0])
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Count__(__pyx_v_that, __pyx_v_start, __pyx_v_endex, (__pyx_v_buffer[0])); goto __pyx_L0;
0817:
+0818: elif size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+0819: size2 = that.endex - that.start
__pyx_v_size2 = (__pyx_v_that->endex - __pyx_v_that->start);
0820:
+0821: if start > size2:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0822: start = size2 # trim to end
__pyx_v_start = __pyx_v_size2;
+0823: if endex > size2:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0824: endex = size2 # trim to end
__pyx_v_endex = __pyx_v_size2;
+0825: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+0826: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
0827:
+0828: if size <= size2 and size <= endex - start:
__pyx_t_2 = ((__pyx_v_size <= __pyx_v_size2) != 0); if (__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L8_bool_binop_done; } __pyx_t_2 = ((__pyx_v_size <= (__pyx_v_endex - __pyx_v_start)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L8_bool_binop_done:; if (__pyx_t_1) { /* … */ }
+0829: size2 = endex - size + 1
__pyx_v_size2 = ((__pyx_v_endex - __pyx_v_size) + 1);
0830:
+0831: if start > size2:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0832: start = size2 # trim to end
__pyx_v_start = __pyx_v_size2;
+0833: if endex > size2:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+0834: endex = size2 # trim to end
__pyx_v_endex = __pyx_v_size2;
0835:
+0836: ptr = &that.data[that.start + start]
__pyx_v_ptr = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_start)]));
+0837: end = &that.data[that.start + endex]
__pyx_v_end = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_endex)]));
0838:
+0839: while ptr < end:
while (1) { __pyx_t_1 = ((__pyx_v_ptr < __pyx_v_end) != 0); if (!__pyx_t_1) break;
+0840: if ptr[0] == buffer[0]: # faster pruning
__pyx_t_1 = (((__pyx_v_ptr[0]) == (__pyx_v_buffer[0])) != 0); if (__pyx_t_1) { /* … */ }
+0841: if not memcmp(ptr, buffer, size):
__pyx_t_1 = ((!(memcmp(__pyx_v_ptr, __pyx_v_buffer, __pyx_v_size) != 0)) != 0); if (__pyx_t_1) { /* … */ }
+0842: ptr += size - 1
__pyx_v_ptr = (__pyx_v_ptr + (__pyx_v_size - 1));
+0843: count += 1
__pyx_v_count = (__pyx_v_count + 1);
+0844: ptr += 1
__pyx_v_ptr = (__pyx_v_ptr + 1); }
+0845: return count
__pyx_r = __pyx_v_count; goto __pyx_L0;
0846:
0847:
+0848: cdef size_t Block_Count(const Block_* that, ssize_t start, ssize_t endex,
static size_t __pyx_f_10bytesparse_2_c_Block_Count(Block_ const *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Py_ssize_t __pyx_v_ssize; size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
0849: size_t size, const byte_t* buffer) nogil:
0850: cdef:
+0851: ssize_t ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
0852:
+0853: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+0854: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+0855: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+0856: start = 0 # trim to start
__pyx_v_start = 0;
0857:
+0858: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+0859: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+0860: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+0861: endex = 0 # trim to start
__pyx_v_endex = 0;
0862:
+0863: return Block_Count_(that, <size_t>start, <size_t>endex, size, buffer)
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Count_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), __pyx_v_size, __pyx_v_buffer); goto __pyx_L0;
0864:
0865:
+0866: cdef Block_* Block_Reserve_(Block_* that, size_t offset, size_t size, bint zero) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Reserve_(Block_ *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size, int __pyx_v_zero) { size_t __pyx_v_used; size_t __pyx_v_allocated; Block_ *__pyx_v_ptr; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Reserve_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Block_Reserve_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
0867: cdef:
0868: size_t used
0869: size_t margin
0870: size_t allocated
0871: Block_* ptr
0872:
+0873: Block_CheckMutable(that)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_CheckMutable(__pyx_v_that); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 873, __pyx_L1_error)
0874:
+0875: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+0876: if ((size > SIZE_HMAX or
__pyx_t_3 = ((__pyx_v_size > SIZE_HMAX) != 0); if (!__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; } /* … */ if (unlikely(__pyx_t_2)) { /* … */ }
+0877: CannotAddSizeU(that.endex, size) or
__pyx_t_3 = (__pyx_f_10bytesparse_2_c_CannotAddSizeU(__pyx_v_that->endex, __pyx_v_size) != 0); if (!__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; }
+0878: that.endex + size > SIZE_HMAX)):
__pyx_t_3 = (((__pyx_v_that->endex + __pyx_v_size) > SIZE_HMAX) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L5_bool_binop_done:;
+0879: raise OverflowError('size overflow')
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 879, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 879, __pyx_L1_error)
0880:
+0881: used = that.endex - that.start
__pyx_v_used = (__pyx_v_that->endex - __pyx_v_that->start);
+0882: if offset > used:
__pyx_t_2 = ((__pyx_v_offset > __pyx_v_used) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0883: raise IndexError('index out of range')
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 883, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 883, __pyx_L1_error) /* … */ __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_u_index_out_of_range); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 883, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__6); __Pyx_GIVEREF(__pyx_tuple__6);
0884:
0885: # Prefer the side where there is less data to shift, which also favors the extremes
+0886: if offset >= (used >> 1):
__pyx_t_2 = ((__pyx_v_offset >= (__pyx_v_used >> 1)) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L9; }
+0887: if size > that.allocated - that.endex:
__pyx_t_2 = ((__pyx_v_size > (__pyx_v_that->allocated - __pyx_v_that->endex)) != 0); if (__pyx_t_2) { /* … */ }
0888: # Calculate the upsized allocation
+0889: allocated = Upsize(that.allocated, that.allocated + size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Upsize(__pyx_v_that->allocated, (__pyx_v_that->allocated + __pyx_v_size));
+0890: if allocated > SIZE_HMAX:
__pyx_t_2 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0891: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 891, __pyx_L1_error)
0892:
0893: # Reallocate, including the header
+0894: ptr = <Block_*>PyMem_Realloc(that, Block_HEADING + (allocated * sizeof(byte_t)))
__pyx_v_ptr = ((Block_ *)PyMem_Realloc(__pyx_v_that, (Block_HEADING + (__pyx_v_allocated * (sizeof(byte_t))))));
+0895: if ptr == NULL:
__pyx_t_2 = ((__pyx_v_ptr == NULL) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0896: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 896, __pyx_L1_error)
0897:
0898: # Reassign to that
+0899: that = ptr
__pyx_v_that = __pyx_v_ptr;
+0900: that.allocated = allocated # update
__pyx_v_that->allocated = __pyx_v_allocated;
0901:
0902: # Shift elements to make room for reservation at the requested offset
+0903: CheckAddSizeU(offset, that.start)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_offset, __pyx_v_that->start); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 903, __pyx_L1_error)
+0904: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+0905: used = that.endex - offset
__pyx_v_used = (__pyx_v_that->endex - __pyx_v_offset);
+0906: if used:
__pyx_t_2 = (__pyx_v_used != 0); if (__pyx_t_2) { /* … */ }
+0907: memmove(&that.data[offset + size],
(void)(memmove((&(__pyx_v_that->data[(__pyx_v_offset + __pyx_v_size)])), (&(__pyx_v_that->data[__pyx_v_offset])), (__pyx_v_used * (sizeof(byte_t)))));
0908: &that.data[offset],
0909: used * sizeof(byte_t))
+0910: if zero:
__pyx_t_2 = (__pyx_v_zero != 0); if (__pyx_t_2) { /* … */ }
+0911: memset(&that.data[offset], 0, size * sizeof(byte_t)) # pad with zeros
(void)(memset((&(__pyx_v_that->data[__pyx_v_offset])), 0, (__pyx_v_size * (sizeof(byte_t)))));
+0912: that.endex += size
__pyx_v_that->endex = (__pyx_v_that->endex + __pyx_v_size);
0913:
0914: else:
+0915: if size <= that.start:
/*else*/ { __pyx_t_2 = ((__pyx_v_size <= __pyx_v_that->start) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L15; }
0916: # Shift elements to make room for reservation at the requested offset
+0917: that.start -= size
__pyx_v_that->start = (__pyx_v_that->start - __pyx_v_size);
+0918: if offset:
__pyx_t_2 = (__pyx_v_offset != 0); if (__pyx_t_2) { /* … */ }
+0919: memmove(&that.data[that.start],
(void)(memmove((&(__pyx_v_that->data[__pyx_v_that->start])), (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_size)])), (__pyx_v_offset * (sizeof(byte_t)))));
0920: &that.data[that.start + size],
0921: offset * sizeof(byte_t))
+0922: if zero:
__pyx_t_2 = (__pyx_v_zero != 0); if (__pyx_t_2) { /* … */ }
+0923: memset(&that.data[that.start + offset], 0, size * sizeof(byte_t)) # pad with zeros
(void)(memset((&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_offset)])), 0, (__pyx_v_size * (sizeof(byte_t)))));
0924:
0925: else:
0926: # Calculate the upsized allocation
+0927: CheckAddSizeU(that.allocated, size)
/*else*/ {
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->allocated, __pyx_v_size); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 927, __pyx_L1_error)
+0928: allocated = Upsize(that.allocated, that.allocated + size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Upsize(__pyx_v_that->allocated, (__pyx_v_that->allocated + __pyx_v_size));
+0929: if allocated > SIZE_HMAX:
__pyx_t_2 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0930: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 930, __pyx_L1_error)
0931:
0932: # Allocate a new chunk, including the header
+0933: ptr = <Block_*>PyMem_Calloc(Block_HEADING + (allocated * sizeof(byte_t)), 1, zero)
__pyx_v_ptr = ((Block_ *)__pyx_f_10bytesparse_2_c_PyMem_Calloc((Block_HEADING + (__pyx_v_allocated * (sizeof(byte_t)))), 1, __pyx_v_zero));
+0934: if ptr == NULL:
__pyx_t_2 = ((__pyx_v_ptr == NULL) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0935: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 935, __pyx_L1_error)
0936:
0937: # Prepare the new chunk aligning towards the end
+0938: ptr.address = that.address
__pyx_t_5 = __pyx_v_that->address; __pyx_v_ptr->address = __pyx_t_5;
+0939: ptr.references = that.references # transfer ownership
__pyx_t_6 = __pyx_v_that->references; __pyx_v_ptr->references = __pyx_t_6;
+0940: ptr.allocated = allocated
__pyx_v_ptr->allocated = __pyx_v_allocated;
+0941: ptr.endex = ptr.allocated - MARGIN # leave some room
__pyx_v_ptr->endex = (__pyx_v_ptr->allocated - MARGIN);
+0942: ptr.start = ptr.endex - used - size
__pyx_v_ptr->start = ((__pyx_v_ptr->endex - __pyx_v_used) - __pyx_v_size);
0943:
0944: # Shift/copy elements to make room for reservation at the requested offset
+0945: if offset:
__pyx_t_2 = (__pyx_v_offset != 0); if (__pyx_t_2) { /* … */ }
+0946: used -= offset # prepare for later
__pyx_v_used = (__pyx_v_used - __pyx_v_offset);
+0947: memcpy(&ptr.data[ptr.start],
(void)(memcpy((&(__pyx_v_ptr->data[__pyx_v_ptr->start])), (&(__pyx_v_that->data[__pyx_v_that->start])), (__pyx_v_offset * (sizeof(byte_t)))));
0948: &that.data[that.start],
0949: offset * sizeof(byte_t))
+0950: if used:
__pyx_t_2 = (__pyx_v_used != 0); if (__pyx_t_2) { /* … */ }
+0951: memcpy(&ptr.data[ptr.start + offset + size],
(void)(memcpy((&(__pyx_v_ptr->data[((__pyx_v_ptr->start + __pyx_v_offset) + __pyx_v_size)])), (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_offset)])), (__pyx_v_used * (sizeof(byte_t)))));
0952: &that.data[that.start + offset],
0953: used * sizeof(byte_t))
0954:
0955: # Reassign to that
+0956: PyMem_Free(that)
PyMem_Free(__pyx_v_that);
+0957: that = ptr
__pyx_v_that = __pyx_v_ptr; } __pyx_L15:; } __pyx_L9:;
0958:
+0959: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
0960:
0961:
+0962: cdef Block_* Block_Delete_(Block_* that, size_t offset, size_t size) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Delete_(Block_ *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size) { size_t __pyx_v_allocated; Block_ *__pyx_v_ptr; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Delete_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Block_Delete_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
0963: cdef:
0964: size_t allocated
0965: Block_* ptr
0966:
+0967: Block_CheckMutable(that)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_CheckMutable(__pyx_v_that); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 967, __pyx_L1_error)
0968:
+0969: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+0970: if ((size > SIZE_HMAX or
__pyx_t_3 = ((__pyx_v_size > SIZE_HMAX) != 0); if (!__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; } /* … */ if (unlikely(__pyx_t_2)) { /* … */ }
+0971: CannotAddSizeU(offset, size) or
__pyx_t_3 = (__pyx_f_10bytesparse_2_c_CannotAddSizeU(__pyx_v_offset, __pyx_v_size) != 0); if (!__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; }
+0972: offset + size > SIZE_HMAX or
__pyx_t_3 = (((__pyx_v_offset + __pyx_v_size) > SIZE_HMAX) != 0); if (!__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; }
+0973: CannotAddSizeU(offset, that.start) or
__pyx_t_3 = (__pyx_f_10bytesparse_2_c_CannotAddSizeU(__pyx_v_offset, __pyx_v_that->start) != 0); if (!__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; }
+0974: that.start > SIZE_HMAX)):
__pyx_t_3 = ((__pyx_v_that->start > SIZE_HMAX) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L5_bool_binop_done:;
+0975: raise OverflowError('size overflow')
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 975, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 975, __pyx_L1_error)
0976:
+0977: if that.endex < that.start + offset + size:
__pyx_t_2 = ((__pyx_v_that->endex < ((__pyx_v_that->start + __pyx_v_offset) + __pyx_v_size)) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0978: raise IndexError('index out of range')
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 978, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 978, __pyx_L1_error)
0979:
0980: # Calculate the downsized allocation
+0981: allocated = Downsize(that.allocated, that.allocated - size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Downsize(__pyx_v_that->allocated, (__pyx_v_that->allocated - __pyx_v_size));
+0982: if allocated > SIZE_HMAX:
__pyx_t_2 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+0983: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 983, __pyx_L1_error)
0984:
+0985: if offset == 0:
__pyx_t_2 = ((__pyx_v_offset == 0) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L12; }
+0986: if allocated == that.allocated:
__pyx_t_2 = ((__pyx_v_allocated == __pyx_v_that->allocated) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L13; }
0987: # Just skip initial if not reallocated and no offset
+0988: that.start += size
__pyx_v_that->start = (__pyx_v_that->start + __pyx_v_size);
0989: else:
0990: # Shift elements to make for the deleted gap at the beginning
+0991: offset += that.start
/*else*/ { __pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+0992: memmove(&that.data[MARGIN], # realign to initial MARGIN
(void)(memmove((&(__pyx_v_that->data[MARGIN])), (&(__pyx_v_that->data[(__pyx_v_offset + __pyx_v_size)])), ((__pyx_v_that->endex - (__pyx_v_offset + __pyx_v_size)) * (sizeof(byte_t)))));
0993: &that.data[offset + size],
0994: (that.endex - (offset + size)) * sizeof(byte_t))
+0995: size = that.endex - that.start - size
__pyx_v_size = ((__pyx_v_that->endex - __pyx_v_that->start) - __pyx_v_size);
+0996: that.start = MARGIN
__pyx_v_that->start = MARGIN;
+0997: that.endex = MARGIN + size
__pyx_v_that->endex = (MARGIN + __pyx_v_size); } __pyx_L13:;
0998: else:
0999: # Shift elements to make for the deleted gap at the requested offset
+1000: offset += that.start
/*else*/ { __pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+1001: memmove(&that.data[offset],
(void)(memmove((&(__pyx_v_that->data[__pyx_v_offset])), (&(__pyx_v_that->data[(__pyx_v_offset + __pyx_v_size)])), ((__pyx_v_that->endex - (__pyx_v_offset + __pyx_v_size)) * (sizeof(byte_t)))));
1002: &that.data[offset + size],
1003: (that.endex - (offset + size)) * sizeof(byte_t))
+1004: that.endex -= size
__pyx_v_that->endex = (__pyx_v_that->endex - __pyx_v_size); } __pyx_L12:;
1005:
+1006: if allocated != that.allocated:
__pyx_t_2 = ((__pyx_v_allocated != __pyx_v_that->allocated) != 0); if (__pyx_t_2) { /* … */ }
1007: # Reallocate, including the header
+1008: ptr = <Block_*>PyMem_Realloc(that, Block_HEADING + (allocated * sizeof(byte_t)))
__pyx_v_ptr = ((Block_ *)PyMem_Realloc(__pyx_v_that, (Block_HEADING + (__pyx_v_allocated * (sizeof(byte_t))))));
+1009: if ptr == NULL:
__pyx_t_2 = ((__pyx_v_ptr == NULL) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+1010: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 1010, __pyx_L1_error)
1011:
1012: # Reassign to that
+1013: that = ptr
__pyx_v_that = __pyx_v_ptr;
+1014: that.allocated = allocated
__pyx_v_that->allocated = __pyx_v_allocated;
1015:
+1016: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1017:
1018:
+1019: cdef Block_* Block_Clear(Block_* that) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Clear(Block_ *__pyx_v_that) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Clear", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_Clear", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1020: return Block_Delete_(that, 0, that.endex - that.start)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_Delete_(__pyx_v_that, 0, (__pyx_v_that->endex - __pyx_v_that->start)); if (unlikely(__pyx_t_1 == ((Block_ *)NULL))) __PYX_ERR(0, 1020, __pyx_L1_error)
__pyx_r = __pyx_t_1;
goto __pyx_L0;
1021:
1022:
+1023: cdef byte_t* Block_At_(Block_* that, size_t offset) nogil:
static byte_t *__pyx_f_10bytesparse_2_c_Block_At_(Block_ *__pyx_v_that, size_t __pyx_v_offset) { byte_t *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+1024: return &that.data[that.start + offset]
__pyx_r = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_offset)])); goto __pyx_L0;
1025:
1026:
+1027: cdef const byte_t* Block_At__(const Block_* that, size_t offset) nogil:
static byte_t const *__pyx_f_10bytesparse_2_c_Block_At__(Block_ const *__pyx_v_that, size_t __pyx_v_offset) { byte_t const *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+1028: return &that.data[that.start + offset]
__pyx_r = (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_offset)])); goto __pyx_L0;
1029:
1030:
+1031: cdef byte_t Block_Get__(const Block_* that, size_t offset) nogil:
static byte_t __pyx_f_10bytesparse_2_c_Block_Get__(Block_ const *__pyx_v_that, size_t __pyx_v_offset) { byte_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+1032: return that.data[that.start + offset]
__pyx_r = (__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_offset)]); goto __pyx_L0;
1033:
1034:
+1035: cdef int Block_Get_(const Block_* that, size_t offset) except -1:
static int __pyx_f_10bytesparse_2_c_Block_Get_(Block_ const *__pyx_v_that, size_t __pyx_v_offset) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Get_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Block_Get_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1036: CheckAddSizeU(that.start, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1036, __pyx_L1_error)
+1037: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
1038:
+1039: if offset < that.endex:
__pyx_t_2 = ((__pyx_v_offset < __pyx_v_that->endex) != 0); if (likely(__pyx_t_2)) { /* … */ }
+1040: return <int><unsigned>that.data[offset]
__pyx_r = ((int)((unsigned int)(__pyx_v_that->data[__pyx_v_offset]))); goto __pyx_L0;
1041: else:
+1042: raise IndexError('index out of range')
/*else*/ { __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1042, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 1042, __pyx_L1_error) }
1043:
1044:
+1045: cdef int Block_Get(const Block_* that, ssize_t offset) except -1:
static int __pyx_f_10bytesparse_2_c_Block_Get(Block_ const *__pyx_v_that, Py_ssize_t __pyx_v_offset) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Get", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_Get", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1046: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
+1047: offset += <ssize_t>(that.endex - that.start) # anchor to end
__pyx_v_offset = (__pyx_v_offset + ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start)));
+1048: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1049: raise IndexError('index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1049, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1049, __pyx_L1_error)
1050:
+1051: return Block_Get_(that, <size_t>offset)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Get_(__pyx_v_that, ((size_t)__pyx_v_offset)); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 1051, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
1052:
1053:
+1054: cdef byte_t Block_Set__(Block_* that, size_t offset, byte_t value) nogil:
static byte_t __pyx_f_10bytesparse_2_c_Block_Set__(Block_ *__pyx_v_that, size_t __pyx_v_offset, byte_t __pyx_v_value) { byte_t __pyx_v_backup; byte_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
1055: cdef:
1056: byte_t backup
1057:
+1058: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+1059: backup = that.data[offset]
__pyx_v_backup = (__pyx_v_that->data[__pyx_v_offset]);
+1060: that.data[offset] = value
(__pyx_v_that->data[__pyx_v_offset]) = __pyx_v_value;
+1061: return backup
__pyx_r = __pyx_v_backup; goto __pyx_L0;
1062:
1063:
+1064: cdef int Block_Set_(Block_* that, size_t offset, byte_t value) except -1:
static int __pyx_f_10bytesparse_2_c_Block_Set_(Block_ *__pyx_v_that, size_t __pyx_v_offset, byte_t __pyx_v_value) { int __pyx_v_backup; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Set_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Block_Set_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1065: cdef:
1066: int backup
1067:
1068: # Block_CheckMutable(that)
+1069: CheckAddSizeU(that.start, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1069, __pyx_L1_error)
+1070: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
1071:
+1072: if offset < that.endex:
__pyx_t_2 = ((__pyx_v_offset < __pyx_v_that->endex) != 0); if (likely(__pyx_t_2)) { /* … */ }
+1073: backup = <int><unsigned>that.data[offset]
__pyx_v_backup = ((int)((unsigned int)(__pyx_v_that->data[__pyx_v_offset])));
+1074: that.data[offset] = value
(__pyx_v_that->data[__pyx_v_offset]) = __pyx_v_value;
+1075: return backup
__pyx_r = __pyx_v_backup; goto __pyx_L0;
1076: else:
+1077: raise IndexError('index out of range')
/*else*/ { __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1077, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 1077, __pyx_L1_error) }
1078:
1079:
+1080: cdef int Block_Set(Block_* that, ssize_t offset, byte_t value) except -1:
static int __pyx_f_10bytesparse_2_c_Block_Set(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_offset, byte_t __pyx_v_value) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Set", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_Set", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1081: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
+1082: offset += <ssize_t>(that.endex - that.start) # anchor to end
__pyx_v_offset = (__pyx_v_offset + ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start)));
+1083: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1084: raise IndexError('index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1084, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1084, __pyx_L1_error)
1085:
+1086: return Block_Set_(that, <size_t>offset, value)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Set_(__pyx_v_that, ((size_t)__pyx_v_offset), __pyx_v_value); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 1086, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
1087:
1088:
+1089: cdef Block_* Block_Pop__(Block_* that, byte_t* value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Pop__(Block_ *__pyx_v_that, byte_t *__pyx_v_value) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Pop__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Block_Pop__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1090: # Block_CheckMutable(that)
1091:
+1092: if that.start < that.endex:
__pyx_t_1 = ((__pyx_v_that->start < __pyx_v_that->endex) != 0); if (likely(__pyx_t_1)) { /* … */ }
+1093: if value:
__pyx_t_1 = (__pyx_v_value != 0); if (__pyx_t_1) { /* … */ }
+1094: value[0] = that.data[that.endex - 1] # backup
(__pyx_v_value[0]) = (__pyx_v_that->data[(__pyx_v_that->endex - 1)]);
1095:
+1096: return Block_Delete_(that, that.endex - that.start - 1, 1)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Delete_(__pyx_v_that, ((__pyx_v_that->endex - __pyx_v_that->start) - 1), 1); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1096, __pyx_L1_error)
__pyx_r = __pyx_t_2;
goto __pyx_L0;
1097: else:
+1098: raise IndexError('pop index out of range')
/*else*/ { __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1098, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 1098, __pyx_L1_error) } /* … */ __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_u_pop_index_out_of_range); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 1098, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7);
1099:
1100:
+1101: cdef Block_* Block_Pop_(Block_* that, size_t offset, byte_t* value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Pop_(Block_ *__pyx_v_that, size_t __pyx_v_offset, byte_t *__pyx_v_value) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Pop_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Block_Pop_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1102: # Block_CheckMutable(that)
+1103: CheckAddSizeU(that.start, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1103, __pyx_L1_error)
1104:
+1105: if that.start + offset < that.endex:
__pyx_t_2 = (((__pyx_v_that->start + __pyx_v_offset) < __pyx_v_that->endex) != 0); if (likely(__pyx_t_2)) { /* … */ }
+1106: if value:
__pyx_t_2 = (__pyx_v_value != 0); if (__pyx_t_2) { /* … */ }
+1107: value[0] = that.data[that.start + offset] # backup
(__pyx_v_value[0]) = (__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_offset)]);
1108:
+1109: return Block_Delete_(that, offset, 1)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Delete_(__pyx_v_that, __pyx_v_offset, 1); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1109, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
1110: else:
+1111: raise IndexError('pop index out of range')
/*else*/ { __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1111, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 1111, __pyx_L1_error) }
1112:
1113:
+1114: cdef Block_* Block_Pop(Block_* that, ssize_t offset, byte_t* value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Pop(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_offset, byte_t *__pyx_v_value) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Pop", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_Pop", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1115: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
+1116: offset += <ssize_t>(that.endex - that.start) # anchor to end
__pyx_v_offset = (__pyx_v_offset + ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start)));
+1117: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1118: raise IndexError('pop index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1118, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1118, __pyx_L1_error)
1119:
+1120: return Block_Pop_(that, <size_t>offset, value)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Pop_(__pyx_v_that, ((size_t)__pyx_v_offset), __pyx_v_value); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1120, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
1121:
1122:
+1123: cdef Block_* Block_PopLeft(Block_* that, byte_t* value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_PopLeft(Block_ *__pyx_v_that, byte_t *__pyx_v_value) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_PopLeft", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_PopLeft", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1124: return Block_Pop_(that, 0, value)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_Pop_(__pyx_v_that, 0, __pyx_v_value); if (unlikely(__pyx_t_1 == ((Block_ *)NULL))) __PYX_ERR(0, 1124, __pyx_L1_error)
__pyx_r = __pyx_t_1;
goto __pyx_L0;
1125:
1126:
+1127: cdef Block_* Block_Insert_(Block_* that, size_t offset, byte_t value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Insert_(Block_ *__pyx_v_that, size_t __pyx_v_offset, byte_t __pyx_v_value) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Insert_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_Insert_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1128: # Insert the value at the requested offset
+1129: that = Block_Reserve_(that, offset, 1, False)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, __pyx_v_offset, 1, 0); if (unlikely(__pyx_t_1 == ((Block_ *)NULL))) __PYX_ERR(0, 1129, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+1130: that.data[that.start + offset] = value
(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_offset)]) = __pyx_v_value;
+1131: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1132:
1133:
+1134: cdef Block_* Block_Insert(Block_* that, ssize_t offset, byte_t value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Insert(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_offset, byte_t __pyx_v_value) { Py_ssize_t __pyx_v_size; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Insert", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_Insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1135: cdef:
+1136: ssize_t size = <ssize_t>(that.endex - that.start)
__pyx_v_size = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1137:
+1138: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+1139: offset += size # anchor to end
__pyx_v_offset = (__pyx_v_offset + __pyx_v_size);
+1140: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
1141: # raise IndexError('index out of range')
+1142: offset = 0 # as per bytearray.insert
__pyx_v_offset = 0;
1143:
+1144: elif offset > size:
__pyx_t_1 = ((__pyx_v_offset > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L3:;
1145: # raise IndexError('index out of range')
+1146: offset = size # as per bytearray.insert
__pyx_v_offset = __pyx_v_size;
1147:
+1148: return Block_Insert_(that, <size_t>offset, value)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Insert_(__pyx_v_that, ((size_t)__pyx_v_offset), __pyx_v_value); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1148, __pyx_L1_error)
__pyx_r = __pyx_t_2;
goto __pyx_L0;
1149:
1150:
+1151: cdef Block_* Block_Append(Block_* that, byte_t value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Append(Block_ *__pyx_v_that, byte_t __pyx_v_value) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Append", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_Append", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1152: # Insert the value after the end
+1153: that = Block_Reserve_(that, that.endex - that.start, 1, False)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, (__pyx_v_that->endex - __pyx_v_that->start), 1, 0); if (unlikely(__pyx_t_1 == ((Block_ *)NULL))) __PYX_ERR(0, 1153, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+1154: that.data[that.endex - 1] = value
(__pyx_v_that->data[(__pyx_v_that->endex - 1)]) = __pyx_v_value;
+1155: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1156:
1157:
+1158: cdef Block_* Block_AppendLeft(Block_* that, byte_t value) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_AppendLeft(Block_ *__pyx_v_that, byte_t __pyx_v_value) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_AppendLeft", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_AppendLeft", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1159: # Insert the value after the end
+1160: that = Block_Reserve_(that, 0, 1, False)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, 0, 1, 0); if (unlikely(__pyx_t_1 == ((Block_ *)NULL))) __PYX_ERR(0, 1160, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+1161: that.data[that.start] = value
(__pyx_v_that->data[__pyx_v_that->start]) = __pyx_v_value;
+1162: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1163:
1164:
+1165: cdef Block_* Block_Extend_(Block_* that, size_t size, const byte_t* buffer) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Extend_(Block_ *__pyx_v_that, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Extend_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_Extend_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1166: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1167: that = Block_Reserve_(that, that.endex - that.start, size, False)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, (__pyx_v_that->endex - __pyx_v_that->start), __pyx_v_size, 0); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1167, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+1168: memmove(&that.data[that.endex - size], buffer, size * sizeof(byte_t))
(void)(memmove((&(__pyx_v_that->data[(__pyx_v_that->endex - __pyx_v_size)])), __pyx_v_buffer, (__pyx_v_size * (sizeof(byte_t)))));
+1169: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1170:
1171:
+1172: cdef Block_* Block_Extend(Block_* that, const Block_* more) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Extend(Block_ *__pyx_v_that, Block_ const *__pyx_v_more) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Extend", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_Extend", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1173: that = Block_Extend_(that, Block_Length(more), Block_At__(more, 0))
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_Extend_(__pyx_v_that, __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_more), __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_more, 0)); if (unlikely(__pyx_t_1 == ((Block_ *)NULL))) __PYX_ERR(0, 1173, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+1174: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1175:
1176:
+1177: cdef Block_* Block_ExtendLeft_(Block_* that, size_t size, const byte_t* buffer) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_ExtendLeft_(Block_ *__pyx_v_that, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_ExtendLeft_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_ExtendLeft_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1178: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1179: that = Block_Reserve_(that, 0, size, False)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, 0, __pyx_v_size, 0); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1179, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+1180: memmove(&that.data[that.start], buffer, size * sizeof(byte_t))
(void)(memmove((&(__pyx_v_that->data[__pyx_v_that->start])), __pyx_v_buffer, (__pyx_v_size * (sizeof(byte_t)))));
+1181: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1182:
1183:
+1184: cdef Block_* Block_ExtendLeft(Block_* that, const Block_* more) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_ExtendLeft(Block_ *__pyx_v_that, Block_ const *__pyx_v_more) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_ExtendLeft", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_ExtendLeft", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1185: that = Block_ExtendLeft_(that, Block_Length(more), Block_At__(more, 0))
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Block_ExtendLeft_(__pyx_v_that, __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_more), __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_more, 0)); if (unlikely(__pyx_t_1 == ((Block_ *)NULL))) __PYX_ERR(0, 1185, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+1186: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1187:
1188:
+1189: cdef void Block_RotateLeft__(Block_* that, size_t offset) nogil:
static void __pyx_f_10bytesparse_2_c_Block_RotateLeft__(Block_ *__pyx_v_that, size_t __pyx_v_offset) { size_t __pyx_v_size; byte_t *__pyx_v_data; byte_t __pyx_v_first; /* … */ /* function exit code */ }
1190: cdef:
+1191: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
+1192: byte_t* data = &that.data[that.start]
__pyx_v_data = (&(__pyx_v_that->data[__pyx_v_that->start]));
1193: byte_t first
1194:
+1195: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1196: if offset == 1:
__pyx_t_1 = ((__pyx_v_offset == 1) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+1197: first = data[0]
__pyx_v_first = (__pyx_v_data[0]);
+1198: size -= 1
__pyx_v_size = (__pyx_v_size - 1);
+1199: while size:
while (1) { __pyx_t_1 = (__pyx_v_size != 0); if (!__pyx_t_1) break;
+1200: data[0] = data[1]
(__pyx_v_data[0]) = (__pyx_v_data[1]);
+1201: data += 1
__pyx_v_data = (__pyx_v_data + 1);
+1202: size -= 1
__pyx_v_size = (__pyx_v_size - 1); }
+1203: data[0] = first
(__pyx_v_data[0]) = __pyx_v_first;
1204:
+1205: elif offset:
__pyx_t_1 = (__pyx_v_offset != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+1206: Reverse(data, 0, offset - 1)
__pyx_f_10bytesparse_2_c_Reverse(__pyx_v_data, 0, (__pyx_v_offset - 1));
+1207: Reverse(data, offset, size - 1)
__pyx_f_10bytesparse_2_c_Reverse(__pyx_v_data, __pyx_v_offset, (__pyx_v_size - 1));
+1208: Reverse(data, 0, size - 1)
__pyx_f_10bytesparse_2_c_Reverse(__pyx_v_data, 0, (__pyx_v_size - 1));
1209:
1210:
+1211: cdef void Block_RotateLeft_(Block_* that, size_t offset) nogil:
static void __pyx_f_10bytesparse_2_c_Block_RotateLeft_(Block_ *__pyx_v_that, size_t __pyx_v_offset) { size_t __pyx_v_size; /* … */ /* function exit code */ }
1212: cdef:
+1213: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1214:
+1215: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1216: if offset >= size:
__pyx_t_1 = ((__pyx_v_offset >= __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
1217: with cython.cdivision(True):
+1218: offset = offset % size # no "%=" to avoid zero check
__pyx_v_offset = (__pyx_v_offset % __pyx_v_size);
1219:
+1220: Block_RotateLeft__(that, offset)
__pyx_f_10bytesparse_2_c_Block_RotateLeft__(__pyx_v_that, __pyx_v_offset);
1221:
1222:
+1223: cdef void Block_RotateRight__(Block_* that, size_t offset) nogil:
static void __pyx_f_10bytesparse_2_c_Block_RotateRight__(Block_ *__pyx_v_that, size_t __pyx_v_offset) { size_t __pyx_v_size; byte_t *__pyx_v_data; byte_t __pyx_v_last; /* … */ /* function exit code */ }
1224: cdef:
+1225: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
+1226: byte_t* data = &that.data[that.start]
__pyx_v_data = (&(__pyx_v_that->data[__pyx_v_that->start]));
1227: byte_t last
1228:
+1229: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1230: if offset == 1:
__pyx_t_1 = ((__pyx_v_offset == 1) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+1231: size -= 1
__pyx_v_size = (__pyx_v_size - 1);
+1232: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1233: data += size
__pyx_v_data = (__pyx_v_data + __pyx_v_size);
+1234: last = data[0]
__pyx_v_last = (__pyx_v_data[0]);
+1235: while size:
while (1) { __pyx_t_1 = (__pyx_v_size != 0); if (!__pyx_t_1) break;
+1236: size -= 1
__pyx_v_size = (__pyx_v_size - 1);
+1237: data -= 1
__pyx_v_data = (__pyx_v_data - 1);
+1238: data[1] = data[0]
(__pyx_v_data[1]) = (__pyx_v_data[0]); }
+1239: data[0] = last
(__pyx_v_data[0]) = __pyx_v_last;
1240:
+1241: elif offset:
__pyx_t_1 = (__pyx_v_offset != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+1242: offset = size - offset
__pyx_v_offset = (__pyx_v_size - __pyx_v_offset);
+1243: Reverse(data, 0, offset - 1)
__pyx_f_10bytesparse_2_c_Reverse(__pyx_v_data, 0, (__pyx_v_offset - 1));
+1244: Reverse(data, offset, size - 1)
__pyx_f_10bytesparse_2_c_Reverse(__pyx_v_data, __pyx_v_offset, (__pyx_v_size - 1));
+1245: Reverse(data, 0, size - 1)
__pyx_f_10bytesparse_2_c_Reverse(__pyx_v_data, 0, (__pyx_v_size - 1));
1246:
1247:
+1248: cdef void Block_RotateRight_(Block_* that, size_t offset) nogil:
static void __pyx_f_10bytesparse_2_c_Block_RotateRight_(Block_ *__pyx_v_that, size_t __pyx_v_offset) { size_t __pyx_v_size; /* … */ /* function exit code */ }
1249: cdef:
+1250: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1251:
+1252: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1253: if offset >= size:
__pyx_t_1 = ((__pyx_v_offset >= __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
1254: with cython.cdivision(True):
+1255: offset = offset % size # no "%=" to avoid zero check
__pyx_v_offset = (__pyx_v_offset % __pyx_v_size);
1256:
+1257: Block_RotateRight__(that, offset)
__pyx_f_10bytesparse_2_c_Block_RotateRight__(__pyx_v_that, __pyx_v_offset);
1258:
1259:
+1260: cdef void Block_Rotate(Block_* that, ssize_t offset) nogil:
static void __pyx_f_10bytesparse_2_c_Block_Rotate(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_offset) { /* … */ /* function exit code */ }
+1261: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+1262: Block_RotateLeft_(that, <size_t>-offset)
__pyx_f_10bytesparse_2_c_Block_RotateLeft_(__pyx_v_that, ((size_t)(-__pyx_v_offset)));
1263: else:
+1264: Block_RotateRight_(that, <size_t>offset)
/*else*/ { __pyx_f_10bytesparse_2_c_Block_RotateRight_(__pyx_v_that, ((size_t)__pyx_v_offset)); } __pyx_L3:;
1265:
1266:
+1267: cdef Block_* Block_Repeat(Block_* that, size_t times) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Repeat(Block_ *__pyx_v_that, size_t __pyx_v_times) { size_t __pyx_v_size; byte_t *__pyx_v_src; byte_t *__pyx_v_dst; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Repeat", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Block_Repeat", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1268: cdef:
1269: size_t size
1270: byte_t* src
1271: byte_t* dst
1272:
+1273: if times == 1:
__pyx_t_1 = ((__pyx_v_times == 1) != 0); if (__pyx_t_1) { /* … */ }
+1274: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1275:
+1276: elif times < 1:
__pyx_t_1 = ((__pyx_v_times < 1) != 0); if (__pyx_t_1) { /* … */ }
+1277: return Block_Clear(that)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Clear(__pyx_v_that); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1277, __pyx_L1_error)
__pyx_r = __pyx_t_2;
goto __pyx_L0;
1278:
1279: else:
+1280: size = that.endex - that.start
/*else*/ { __pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1281: with cython.cdivision(True):
+1282: if size > SIZE_HMAX // times:
__pyx_t_1 = ((__pyx_v_size > (SIZE_HMAX / __pyx_v_times)) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1283: raise OverflowError()
__pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_builtin_OverflowError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1283, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 1283, __pyx_L1_error)
1284:
+1285: times -= 1
__pyx_v_times = (__pyx_v_times - 1);
+1286: that = Block_Reserve_(that, size, size * times, False)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, __pyx_v_size, (__pyx_v_size * __pyx_v_times), 0); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1286, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+1287: src = &that.data[that.start]
__pyx_v_src = (&(__pyx_v_that->data[__pyx_v_that->start]));
+1288: dst = src
__pyx_v_dst = __pyx_v_src;
1289:
+1290: while times:
while (1) { __pyx_t_1 = (__pyx_v_times != 0); if (!__pyx_t_1) break;
+1291: times -= 1
__pyx_v_times = (__pyx_v_times - 1);
+1292: dst += size
__pyx_v_dst = (__pyx_v_dst + __pyx_v_size);
+1293: memcpy(dst, src, size) # whole repetition
(void)(memcpy(__pyx_v_dst, __pyx_v_src, __pyx_v_size)); }
1294:
+1295: return that
__pyx_r = __pyx_v_that; goto __pyx_L0; }
1296:
1297:
+1298: cdef Block_* Block_RepeatToSize(Block_* that, size_t size) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_RepeatToSize(Block_ *__pyx_v_that, size_t __pyx_v_size) { size_t __pyx_v_size2; size_t __pyx_v_times; byte_t *__pyx_v_src; byte_t *__pyx_v_dst; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_RepeatToSize", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_RepeatToSize", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1299: cdef:
1300: size_t size2
1301: size_t times
1302: byte_t* src
1303: byte_t* dst
1304:
+1305: size2 = that.endex - that.start
__pyx_v_size2 = (__pyx_v_that->endex - __pyx_v_that->start);
1306:
+1307: if size2 == 0:
__pyx_t_1 = ((__pyx_v_size2 == 0) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1308: raise RuntimeError('empty')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1308, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1308, __pyx_L1_error) /* … */ __pyx_tuple__8 = PyTuple_Pack(1, __pyx_n_u_empty); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 1308, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8);
1309:
+1310: if size == size2:
__pyx_t_1 = ((__pyx_v_size == __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+1311: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1312:
+1313: elif size < size2:
__pyx_t_1 = ((__pyx_v_size < __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+1314: return Block_DelSlice_(that, size, size2)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_DelSlice_(__pyx_v_that, __pyx_v_size, __pyx_v_size2); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1314, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
1315:
1316: else: # size > size2
+1317: that = Block_Reserve_(that, size2, size - size2, False)
/*else*/ {
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, __pyx_v_size2, (__pyx_v_size - __pyx_v_size2), 0); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1317, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
1318:
+1319: if that.start + 1 == that.endex: # single byte
__pyx_t_1 = (((__pyx_v_that->start + 1) == __pyx_v_that->endex) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L5; }
+1320: dst = &that.data[that.start]
__pyx_v_dst = (&(__pyx_v_that->data[__pyx_v_that->start]));
+1321: memset(dst, dst[0], size)
(void)(memset(__pyx_v_dst, (__pyx_v_dst[0]), __pyx_v_size));
1322:
1323: else: # multiple bytes
+1324: with cython.cdivision(True):
/*else*/ {
+1325: times = size // size2
__pyx_v_times = (__pyx_v_size / __pyx_v_size2);
1326:
1327: # Copy the final partial chunk
+1328: src = &that.data[that.start]
__pyx_v_src = (&(__pyx_v_that->data[__pyx_v_that->start]));
+1329: dst = &that.data[that.start + (size2 * times)]
__pyx_v_dst = (&(__pyx_v_that->data[(__pyx_v_that->start + (__pyx_v_size2 * __pyx_v_times))]));
+1330: memcpy(dst, src, size - (size2 * times))
(void)(memcpy(__pyx_v_dst, __pyx_v_src, (__pyx_v_size - (__pyx_v_size2 * __pyx_v_times))));
1331:
1332: # Copy the multiple times, skipping the first one
+1333: dst = src + size2
__pyx_v_dst = (__pyx_v_src + __pyx_v_size2);
+1334: times -= 1
__pyx_v_times = (__pyx_v_times - 1);
+1335: while times:
while (1) { __pyx_t_1 = (__pyx_v_times != 0); if (!__pyx_t_1) break;
+1336: memcpy(dst, src, size2)
(void)(memcpy(__pyx_v_dst, __pyx_v_src, __pyx_v_size2));
+1337: dst += size2
__pyx_v_dst = (__pyx_v_dst + __pyx_v_size2);
+1338: times -= 1
__pyx_v_times = (__pyx_v_times - 1); } } __pyx_L5:;
1339:
+1340: return that
__pyx_r = __pyx_v_that; goto __pyx_L0; }
1341:
1342:
+1343: cdef vint Block_Read_(const Block_* that, size_t offset, size_t size, byte_t* buffer) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Block_Read_(Block_ const *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size, byte_t *__pyx_v_buffer) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Read_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_Read_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1344: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1345: if size > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_size > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1346: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1346, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1346, __pyx_L1_error)
1347:
+1348: CheckAddSizeU(offset, that.start)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_offset, __pyx_v_that->start); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1348, __pyx_L1_error)
+1349: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
1350:
+1351: CheckAddSizeU(offset, size)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_offset, __pyx_v_size); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1351, __pyx_L1_error)
+1352: if that.endex < offset + size:
__pyx_t_1 = ((__pyx_v_that->endex < (__pyx_v_offset + __pyx_v_size)) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1353: raise IndexError('index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1353, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1353, __pyx_L1_error)
1354:
+1355: memmove(buffer, &that.data[offset], size * sizeof(byte_t))
(void)(memmove(__pyx_v_buffer, (&(__pyx_v_that->data[__pyx_v_offset])), (__pyx_v_size * (sizeof(byte_t)))));
1356:
1357:
+1358: cdef Block_* Block_Write_(Block_* that, size_t offset, size_t size, const byte_t* buffer) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_Write_(Block_ *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Write_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_Write_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1359: # Block_CheckMutable(that)
1360:
+1361: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+1362: CheckAddSizeU(that.start, offset)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1362, __pyx_L1_error)
+1363: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
1364:
+1365: CheckAddSizeU(offset, size)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_offset, __pyx_v_size); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1365, __pyx_L1_error)
+1366: if that.endex < offset + size:
__pyx_t_1 = ((__pyx_v_that->endex < (__pyx_v_offset + __pyx_v_size)) != 0); if (__pyx_t_1) { /* … */ }
+1367: that = Block_Reserve_(that, that.endex - that.start, (offset + size) - that.endex, False)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, (__pyx_v_that->endex - __pyx_v_that->start), ((__pyx_v_offset + __pyx_v_size) - __pyx_v_that->endex), 0); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1367, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
1368:
+1369: memmove(&that.data[offset], buffer, size * sizeof(byte_t))
(void)(memmove((&(__pyx_v_that->data[__pyx_v_offset])), __pyx_v_buffer, (__pyx_v_size * (sizeof(byte_t)))));
+1370: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1371:
1372:
+1373: cdef vint Block_ReadSlice_(const Block_* that, size_t start, size_t endex,
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Block_ReadSlice_(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, size_t *__pyx_v_size_, byte_t *__pyx_v_buffer) { size_t __pyx_v_size; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_ReadSlice_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_ReadSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1374: size_t* size_, byte_t* buffer) except -1:
1375: cdef:
+1376: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1377:
+1378: size_[0] = 0
(__pyx_v_size_[0]) = 0;
1379:
+1380: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1381: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1381, __pyx_L1_error)
+1382: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+1383: start = size # trim source start
__pyx_v_start = __pyx_v_size;
1384:
+1385: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1386: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1386, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1386, __pyx_L1_error)
+1387: elif endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+1388: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
+1389: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+1390: endex = size # trim source end
__pyx_v_endex = __pyx_v_size;
1391:
+1392: size = endex - start
__pyx_v_size = (__pyx_v_endex - __pyx_v_start);
+1393: Block_Read_(that, start, size, buffer)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Read_(__pyx_v_that, __pyx_v_start, __pyx_v_size, __pyx_v_buffer); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1393, __pyx_L1_error)
+1394: size_[0] = size
(__pyx_v_size_[0]) = __pyx_v_size;
1395:
1396:
+1397: cdef vint Block_ReadSlice(const Block_* that, ssize_t start, ssize_t endex,
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Block_ReadSlice(Block_ const *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, size_t *__pyx_v_size_, byte_t *__pyx_v_buffer) { Py_ssize_t __pyx_v_ssize; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_ReadSlice", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_ReadSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1398: size_t* size_, byte_t* buffer) except -1:
1399: cdef:
+1400: ssize_t ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1401:
+1402: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1403: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+1404: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1405: start = 0 # trim source start
__pyx_v_start = 0;
1406:
+1407: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+1408: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+1409: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1410: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
1411:
+1412: Block_ReadSlice_(that, <size_t>start, <size_t>endex, size_, buffer)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_ReadSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), __pyx_v_size_, __pyx_v_buffer); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1412, __pyx_L1_error)
1413:
1414:
+1415: cdef Block_* Block_GetSlice_(const Block_* that, size_t start, size_t endex) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_GetSlice_(Block_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex) { size_t __pyx_v_size; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_GetSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_GetSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1416: cdef:
+1417: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1418:
+1419: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1420: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1420, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1420, __pyx_L1_error)
+1421: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+1422: start = size # trim source start
__pyx_v_start = __pyx_v_size;
1423:
+1424: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1425: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1425, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1425, __pyx_L1_error)
+1426: elif endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+1427: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
+1428: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+1429: endex = size # trim source end
__pyx_v_endex = __pyx_v_size;
1430:
+1431: return Block_Create(that.address + start, endex - start, &that.data[that.start + start])
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Create((__pyx_v_that->address + __pyx_v_start), (__pyx_v_endex - __pyx_v_start), (&(__pyx_v_that->data[(__pyx_v_that->start + __pyx_v_start)]))); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1431, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
1432:
1433:
+1434: cdef Block_* Block_GetSlice(const Block_* that, ssize_t start, ssize_t endex) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_GetSlice(Block_ const *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex) { Py_ssize_t __pyx_v_ssize; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_GetSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_GetSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1435: cdef:
+1436: ssize_t ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1437:
+1438: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1439: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+1440: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1441: start = 0 # trim source start
__pyx_v_start = 0;
1442:
+1443: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+1444: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+1445: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1446: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
1447:
+1448: return Block_GetSlice_(that, <size_t>start, <size_t>endex)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_GetSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex)); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1448, __pyx_L1_error)
__pyx_r = __pyx_t_2;
goto __pyx_L0;
1449:
1450:
+1451: cdef Block_* Block_WriteSlice_(Block_* that, size_t start, size_t endex,
static Block_ *__pyx_f_10bytesparse_2_c_Block_WriteSlice_(Block_ *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { size_t __pyx_v_size2; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_WriteSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_WriteSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1452: size_t size, const byte_t* buffer) except NULL:
1453: cdef:
1454: size_t size2 # source size
1455:
+1456: size2 = size
__pyx_v_size2 = __pyx_v_size;
+1457: size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1458:
+1459: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1460: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1460, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1460, __pyx_L1_error)
+1461: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+1462: start = size # trim target start
__pyx_v_start = __pyx_v_size;
1463:
+1464: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1465: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1465, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1465, __pyx_L1_error)
+1466: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+1467: endex = size # trim target end
__pyx_v_endex = __pyx_v_size;
1468:
+1469: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1470: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
+1471: size = endex - start
__pyx_v_size = (__pyx_v_endex - __pyx_v_start);
1472:
+1473: if size2 > size: # enlarge target at range end
__pyx_t_1 = ((__pyx_v_size2 > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L6; }
+1474: that = Block_Reserve_(that, endex, size2 - size, False)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, __pyx_v_endex, (__pyx_v_size2 - __pyx_v_size), 0); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1474, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
1475:
+1476: elif size > size2: # shrink target at range end
__pyx_t_1 = ((__pyx_v_size > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ } __pyx_L6:;
+1477: endex -= size - size2
__pyx_v_endex = (__pyx_v_endex - (__pyx_v_size - __pyx_v_size2));
+1478: that = Block_Delete_(that, endex, size - size2)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Delete_(__pyx_v_that, __pyx_v_endex, (__pyx_v_size - __pyx_v_size2)); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1478, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
1479:
+1480: that = Block_Write_(that, start, size2, buffer)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Write_(__pyx_v_that, __pyx_v_start, __pyx_v_size2, __pyx_v_buffer); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1480, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
+1481: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1482:
1483:
+1484: cdef Block_* Block_WriteSlice(Block_* that, ssize_t start, ssize_t endex,
static Block_ *__pyx_f_10bytesparse_2_c_Block_WriteSlice(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Py_ssize_t __pyx_v_ssize; Py_ssize_t __pyx_v_ssize2; Py_ssize_t __pyx_v_start2; Py_ssize_t __pyx_v_endex2; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_WriteSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_WriteSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1485: size_t size, const byte_t* buffer) except NULL:
1486: cdef:
1487: ssize_t ssize # target size
1488: ssize_t ssize2 # source size
1489: ssize_t start2 # source start
1490: ssize_t endex2 # source end
1491:
+1492: start2 = 0
__pyx_v_start2 = 0;
+1493: endex2 = <ssize_t>size
__pyx_v_endex2 = ((Py_ssize_t)__pyx_v_size);
1494:
+1495: ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1496:
+1497: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1498: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+1499: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
1500: # start2 -= start # skip initial source data # as per bytearray
+1501: start = 0 # trim target start
__pyx_v_start = 0;
+1502: if start2 > endex2:
__pyx_t_1 = ((__pyx_v_start2 > __pyx_v_endex2) != 0); if (__pyx_t_1) { /* … */ }
+1503: start2 = endex2 # clamp source start
__pyx_v_start2 = __pyx_v_endex2;
1504:
+1505: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+1506: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+1507: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1508: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
1509:
+1510: if endex2 < start2:
__pyx_t_1 = ((__pyx_v_endex2 < __pyx_v_start2) != 0); if (__pyx_t_1) { /* … */ }
+1511: endex2 = start2 # clamp negative source length
__pyx_v_endex2 = __pyx_v_start2;
+1512: ssize2 = endex2 - start2
__pyx_v_ssize2 = (__pyx_v_endex2 - __pyx_v_start2);
1513:
+1514: that = Block_WriteSlice_(that, <size_t>start, <size_t>endex, <size_t>ssize2, &buffer[start2])
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_WriteSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), ((size_t)__pyx_v_ssize2), (&(__pyx_v_buffer[__pyx_v_start2]))); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1514, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+1515: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1516:
1517:
+1518: cdef Block_* Block_SetSlice_(Block_* that, size_t start, size_t endex,
static Block_ *__pyx_f_10bytesparse_2_c_Block_SetSlice_(Block_ *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, Block_ const *__pyx_v_src, size_t __pyx_v_start2, size_t __pyx_v_endex2) { size_t __pyx_v_size2; PyObject *__pyx_v_size = NULL; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_SetSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Block_SetSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_size); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1519: const Block_* src, size_t start2, size_t endex2) except NULL:
1520: cdef:
1521: size_t size2 # source size
1522:
+1523: size2 = src.endex - src.start
__pyx_v_size2 = (__pyx_v_src->endex - __pyx_v_src->start);
1524:
+1525: if start2 > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start2 > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1526: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1526, __pyx_L1_error)
+1527: elif start2 > size2:
__pyx_t_1 = ((__pyx_v_start2 > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+1528: start2 = size2 # trim source start
__pyx_v_start2 = __pyx_v_size2;
1529:
+1530: if endex2 > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex2 > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1531: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1531, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1531, __pyx_L1_error)
+1532: elif endex2 > size2:
__pyx_t_1 = ((__pyx_v_endex2 > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+1533: endex2 = size2 # trim source end
__pyx_v_endex2 = __pyx_v_size2;
1534:
+1535: if endex2 < start2:
__pyx_t_1 = ((__pyx_v_endex2 < __pyx_v_start2) != 0); if (__pyx_t_1) { /* … */ }
+1536: endex2 = start2 # clamp negative source length
__pyx_v_endex2 = __pyx_v_start2;
+1537: size2 = endex2 - start2
__pyx_v_size2 = (__pyx_v_endex2 - __pyx_v_start2);
1538:
+1539: size = that.endex - that.start
__pyx_t_2 = __Pyx_PyInt_FromSize_t((__pyx_v_that->endex - __pyx_v_that->start)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1539, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_size = __pyx_t_2; __pyx_t_2 = 0;
1540:
+1541: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1542: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1542, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1542, __pyx_L1_error)
+1543: elif start > size:
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_start); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1543, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_RichCompare(__pyx_t_2, __pyx_v_size, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1543, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1543, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ }
+1544: start = size # trim target start
__pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_v_size); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 1544, __pyx_L1_error) __pyx_v_start = __pyx_t_4;
1545:
+1546: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1547: raise OverflowError('size overflow')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1547, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 1547, __pyx_L1_error)
+1548: elif endex > size:
__pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_endex); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1548, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = PyObject_RichCompare(__pyx_t_3, __pyx_v_size, Py_GT); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1548, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1548, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; if (__pyx_t_1) { /* … */ }
+1549: endex = size # trim target end
__pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_v_size); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 1549, __pyx_L1_error) __pyx_v_endex = __pyx_t_4;
1550:
+1551: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1552: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
+1553: size = endex - start
__pyx_t_2 = __Pyx_PyInt_FromSize_t((__pyx_v_endex - __pyx_v_start)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1553, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF_SET(__pyx_v_size, __pyx_t_2); __pyx_t_2 = 0;
1554:
+1555: if size2 > size: # enlarge target at range end
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1555, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_RichCompare(__pyx_t_2, __pyx_v_size, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1555, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1555, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ goto __pyx_L9; }
+1556: that = Block_Reserve_(that, endex, size2 - size, False)
__pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1556, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = PyNumber_Subtract(__pyx_t_3, __pyx_v_size); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1556, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_t_2); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 1556, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_that, __pyx_v_endex, __pyx_t_4, 0); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 1556, __pyx_L1_error) __pyx_v_that = __pyx_t_5;
1557:
+1558: elif size > size2: # shrink target at range end
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1558, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_RichCompare(__pyx_v_size, __pyx_t_2, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1558, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 1558, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ } __pyx_L9:;
+1559: endex -= size - size2
__pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_endex); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1559, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1559, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_6 = PyNumber_Subtract(__pyx_v_size, __pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1559, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = PyNumber_InPlaceSubtract(__pyx_t_3, __pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1559, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_t_2); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 1559, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v_endex = __pyx_t_4;
+1560: that = Block_Delete_(that, endex, size - size2)
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1560, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_6 = PyNumber_Subtract(__pyx_v_size, __pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1560, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_t_6); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 1560, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Delete_(__pyx_v_that, __pyx_v_endex, __pyx_t_4); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 1560, __pyx_L1_error) __pyx_v_that = __pyx_t_5;
1561:
+1562: that = Block_Write_(that, start, size2, &src.data[src.start + start2])
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Write_(__pyx_v_that, __pyx_v_start, __pyx_v_size2, (&(__pyx_v_src->data[(__pyx_v_src->start + __pyx_v_start2)]))); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 1562, __pyx_L1_error)
__pyx_v_that = __pyx_t_5;
+1563: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1564:
1565:
+1566: cdef Block_* Block_SetSlice(Block_* that, ssize_t start, ssize_t endex,
static Block_ *__pyx_f_10bytesparse_2_c_Block_SetSlice(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, Block_ const *__pyx_v_src, Py_ssize_t __pyx_v_start2, Py_ssize_t __pyx_v_endex2) { Py_ssize_t __pyx_v_ssize; Py_ssize_t __pyx_v_ssize2; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_SetSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_SetSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1567: const Block_* src, ssize_t start2, ssize_t endex2) except NULL:
1568: cdef:
1569: ssize_t ssize # target size
1570: ssize_t ssize2 # source size
1571:
+1572: ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
+1573: ssize2 = <ssize_t>(src.endex - src.start)
__pyx_v_ssize2 = ((Py_ssize_t)(__pyx_v_src->endex - __pyx_v_src->start));
1574:
+1575: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1576: start += ssize # anchor to target end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+1577: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
1578: # start2 -= start # skip initial source data # as per bytearray
+1579: start = 0 # trim target start
__pyx_v_start = 0;
1580:
+1581: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+1582: endex += ssize # anchor to target end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+1583: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1584: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
1585:
+1586: if start2 < 0:
__pyx_t_1 = ((__pyx_v_start2 < 0) != 0); if (__pyx_t_1) { /* … */ }
+1587: start2 += ssize2 # anchor to source end
__pyx_v_start2 = (__pyx_v_start2 + __pyx_v_ssize2);
+1588: if start2 < 0:
__pyx_t_1 = ((__pyx_v_start2 < 0) != 0); if (__pyx_t_1) { /* … */ }
+1589: start2 = 0 # trim source start
__pyx_v_start2 = 0;
1590:
+1591: if endex2 < 0:
__pyx_t_1 = ((__pyx_v_endex2 < 0) != 0); if (__pyx_t_1) { /* … */ }
+1592: endex2 += ssize2 # anchor to source end
__pyx_v_endex2 = (__pyx_v_endex2 + __pyx_v_ssize2);
+1593: if endex2 < start2:
__pyx_t_1 = ((__pyx_v_endex2 < __pyx_v_start2) != 0); if (__pyx_t_1) { /* … */ }
+1594: endex2 = start2 # clamp negative source length
__pyx_v_endex2 = __pyx_v_start2;
1595:
+1596: that = Block_SetSlice_(that, <size_t>start, <size_t>endex, src, <size_t>start2, <size_t>endex2)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_SetSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), __pyx_v_src, ((size_t)__pyx_v_start2), ((size_t)__pyx_v_endex2)); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1596, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+1597: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1598:
1599:
+1600: cdef Block_* Block_DelSlice_(Block_* that, size_t start, size_t endex) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_DelSlice_(Block_ *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex) { size_t __pyx_v_size; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_DelSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_DelSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1601: cdef:
1602: size_t size
1603:
+1604: size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1605:
+1606: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1607: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1607, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1607, __pyx_L1_error)
+1608: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+1609: start = size # trim start
__pyx_v_start = __pyx_v_size;
1610:
+1611: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1612: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1612, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1612, __pyx_L1_error)
+1613: elif endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+1614: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
+1615: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+1616: endex = size # trim end
__pyx_v_endex = __pyx_v_size;
1617:
+1618: that = Block_Delete_(that, start, (endex - start))
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Delete_(__pyx_v_that, __pyx_v_start, (__pyx_v_endex - __pyx_v_start)); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1618, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
+1619: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1620:
1621:
+1622: cdef Block_* Block_DelSlice(Block_* that, ssize_t start, ssize_t endex) except NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Block_DelSlice(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex) { Py_ssize_t __pyx_v_ssize; Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_DelSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Block_DelSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1623: cdef:
1624: ssize_t ssize
1625:
+1626: ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1627:
+1628: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1629: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+1630: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1631: start = 0 # trim start
__pyx_v_start = 0;
1632:
+1633: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+1634: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+1635: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1636: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
1637:
+1638: that = Block_DelSlice_(that, <size_t>start, <size_t>endex)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_DelSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex)); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1638, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+1639: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1640:
1641:
+1642: cdef bytes Block_Bytes(const Block_* that):
static PyObject *__pyx_f_10bytesparse_2_c_Block_Bytes(Block_ const *__pyx_v_that) { char *__pyx_v_ptr; Py_ssize_t __pyx_v_size; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Bytes", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Block_Bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1643: cdef:
+1644: char* ptr = <char*><void*>&that.data[that.start]
__pyx_v_ptr = ((char *)((void *)(&(__pyx_v_that->data[__pyx_v_that->start]))));
+1645: ssize_t size = <ssize_t>(that.endex - that.start)
__pyx_v_size = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1646:
+1647: return PyBytes_FromStringAndSize(ptr, size)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = PyBytes_FromStringAndSize(__pyx_v_ptr, __pyx_v_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1647, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; goto __pyx_L0;
1648:
1649:
+1650: cdef bytearray Block_Bytearray(const Block_* that):
static PyObject *__pyx_f_10bytesparse_2_c_Block_Bytearray(Block_ const *__pyx_v_that) { char *__pyx_v_ptr; Py_ssize_t __pyx_v_size; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_Bytearray", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Block_Bytearray", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1651: cdef:
+1652: char* ptr = <char*><void*>&that.data[that.start]
__pyx_v_ptr = ((char *)((void *)(&(__pyx_v_that->data[__pyx_v_that->start]))));
+1653: ssize_t size = <ssize_t>(that.endex - that.start)
__pyx_v_size = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1654:
+1655: return PyByteArray_FromStringAndSize(ptr, size)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = PyByteArray_FromStringAndSize(__pyx_v_ptr, __pyx_v_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1655, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; goto __pyx_L0;
1656:
1657:
+1658: cdef BlockView Block_View(Block_* that):
static struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_f_10bytesparse_2_c_Block_View(Block_ *__pyx_v_that) { struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_view = 0; struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_View", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Block_View", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_view); __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1659: cdef:
1660: BlockView view
1661:
+1662: view = BlockView()
__pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10bytesparse_2_c_BlockView)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1662, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_view = ((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_t_1); __pyx_t_1 = 0;
+1663: that = Block_Acquire(that)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Acquire(__pyx_v_that); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 1663, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+1664: view._block = that
__pyx_v_view->_block = __pyx_v_that;
+1665: view._start = that.start
__pyx_t_3 = __pyx_v_that->start; __pyx_v_view->_start = __pyx_t_3;
+1666: view._endex = that.endex
__pyx_t_3 = __pyx_v_that->endex; __pyx_v_view->_endex = __pyx_t_3;
+1667: return view
__Pyx_XDECREF(((PyObject *)__pyx_r)); __Pyx_INCREF(((PyObject *)__pyx_v_view)); __pyx_r = __pyx_v_view; goto __pyx_L0;
1668:
1669:
+1670: cdef BlockView Block_ViewSlice_(Block_* that, size_t start, size_t endex):
static struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_f_10bytesparse_2_c_Block_ViewSlice_(Block_ *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex) { size_t __pyx_v_size; struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_view = 0; struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_ViewSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_ViewSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_view); __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1671: cdef:
+1672: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
1673: BlockView view
1674:
+1675: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1676: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1676, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1676, __pyx_L1_error)
+1677: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+1678: start = size # trim source start
__pyx_v_start = __pyx_v_size;
1679:
+1680: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1681: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1681, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1681, __pyx_L1_error)
+1682: elif endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+1683: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
+1684: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+1685: endex = size # trim source end
__pyx_v_endex = __pyx_v_size;
1686:
+1687: view = BlockView()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10bytesparse_2_c_BlockView)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1687, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_view = ((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_t_2); __pyx_t_2 = 0;
+1688: that = Block_Acquire(that)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Acquire(__pyx_v_that); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 1688, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
+1689: view._block = that
__pyx_v_view->_block = __pyx_v_that;
+1690: view._start = that.start + start
__pyx_v_view->_start = (__pyx_v_that->start + __pyx_v_start);
+1691: view._endex = that.start + endex
__pyx_v_view->_endex = (__pyx_v_that->start + __pyx_v_endex);
+1692: return view
__Pyx_XDECREF(((PyObject *)__pyx_r)); __Pyx_INCREF(((PyObject *)__pyx_v_view)); __pyx_r = __pyx_v_view; goto __pyx_L0;
1693:
1694:
+1695: cdef BlockView Block_ViewSlice(Block_* that, ssize_t start, ssize_t endex):
static struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_f_10bytesparse_2_c_Block_ViewSlice(Block_ *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex) { Py_ssize_t __pyx_v_ssize; struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Block_ViewSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Block_ViewSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1696: cdef:
1697: ssize_t ssize
1698:
+1699: ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
1700:
+1701: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1702: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+1703: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+1704: start = 0 # trim source start
__pyx_v_start = 0;
1705:
+1706: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+1707: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+1708: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1709: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
1710:
+1711: return Block_ViewSlice_(that, <size_t>start, <size_t>endex)
__Pyx_XDECREF(((PyObject *)__pyx_r)); __pyx_t_2 = ((PyObject *)__pyx_f_10bytesparse_2_c_Block_ViewSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1711, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_r = ((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L0;
1712:
1713:
1714: # ---------------------------------------------------------------------------------------------------------------------
1715:
+1716: cdef class BlockView:
struct __pyx_vtabstruct_10bytesparse_2_c_BlockView { int (*check_)(struct __pyx_obj_10bytesparse_2_c_BlockView *); }; static struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *__pyx_vtabptr_10bytesparse_2_c_BlockView;
1717:
+1718: def __cinit__(self):
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_9BlockView_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_10bytesparse_2_c_9BlockView_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView___cinit__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_9BlockView___cinit__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__cinit__", 0); /* … */ /* function exit code */ __pyx_r = 0; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1719: self._block = NULL
__pyx_v_self->_block = NULL;
1720:
+1721: def __dealloc__(self):
/* Python wrapper */ static void __pyx_pw_10bytesparse_2_c_9BlockView_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ static void __pyx_pw_10bytesparse_2_c_9BlockView_3__dealloc__(PyObject *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); __pyx_pf_10bytesparse_2_c_9BlockView_2__dealloc__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); } static void __pyx_pf_10bytesparse_2_c_9BlockView_2__dealloc__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__", 0); /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); }
+1722: if self._block:
__pyx_t_1 = (__pyx_v_self->_block != 0); if (__pyx_t_1) { /* … */ }
+1723: self._block = Block_Release(self._block)
__pyx_v_self->_block = __pyx_f_10bytesparse_2_c_Block_Release(__pyx_v_self->_block);
1724:
+1725: def __getbuffer__(self, Py_buffer* buffer, int flags):
/* Python wrapper */ static CYTHON_UNUSED int __pyx_pw_10bytesparse_2_c_9BlockView_5__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_buffer, int __pyx_v_flags); /*proto*/ static CYTHON_UNUSED int __pyx_pw_10bytesparse_2_c_9BlockView_5__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_buffer, int __pyx_v_flags) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getbuffer__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_4__getbuffer__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self), ((Py_buffer *)__pyx_v_buffer), ((int)__pyx_v_flags)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_9BlockView_4__getbuffer__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self, Py_buffer *__pyx_v_buffer, int __pyx_v_flags) { int __pyx_v_CONTIGUOUS; int __pyx_r; if (__pyx_v_buffer == NULL) { PyErr_SetString(PyExc_BufferError, "PyObject_GetBuffer: view==NULL argument is obsolete"); return -1; } __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getbuffer__", 0); __pyx_v_buffer->obj = Py_None; __Pyx_INCREF(Py_None); __Pyx_GIVEREF(__pyx_v_buffer->obj); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.BlockView.__getbuffer__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; if (__pyx_v_buffer->obj != NULL) { __Pyx_GOTREF(__pyx_v_buffer->obj); __Pyx_DECREF(__pyx_v_buffer->obj); __pyx_v_buffer->obj = 0; } goto __pyx_L2; __pyx_L0:; if (__pyx_v_buffer->obj == Py_None) { __Pyx_GOTREF(__pyx_v_buffer->obj); __Pyx_DECREF(__pyx_v_buffer->obj); __pyx_v_buffer->obj = 0; } __pyx_L2:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1726: cdef:
+1727: int CONTIGUOUS = PyBUF_C_CONTIGUOUS | PyBUF_F_CONTIGUOUS | PyBUF_ANY_CONTIGUOUS
__pyx_v_CONTIGUOUS = ((PyBUF_C_CONTIGUOUS | PyBUF_F_CONTIGUOUS) | PyBUF_ANY_CONTIGUOUS);
1728:
+1729: if flags & PyBUF_WRITABLE:
__pyx_t_1 = ((__pyx_v_flags & PyBUF_WRITABLE) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1730: raise ValueError('read only access')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1730, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1730, __pyx_L1_error) /* … */ __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_u_read_only_access); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 1730, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__9); __Pyx_GIVEREF(__pyx_tuple__9);
1731:
+1732: self.check_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *)__pyx_v_self->__pyx_vtab)->check_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 1732, __pyx_L1_error)
1733:
1734: # self._block = Block_Acquire(self._block)
1735:
+1736: buffer.buf = &self._block.data[self._start]
__pyx_v_buffer->buf = (&(__pyx_v_self->_block->data[__pyx_v_self->_start]));
+1737: buffer.obj = self
__Pyx_INCREF(((PyObject *)__pyx_v_self)); __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); __Pyx_GOTREF(__pyx_v_buffer->obj); __Pyx_DECREF(__pyx_v_buffer->obj); __pyx_v_buffer->obj = ((PyObject *)__pyx_v_self);
+1738: buffer.len = self._endex - self._start
__pyx_v_buffer->len = (__pyx_v_self->_endex - __pyx_v_self->_start);
+1739: buffer.itemsize = 1
__pyx_v_buffer->itemsize = 1;
+1740: buffer.readonly = 1
__pyx_v_buffer->readonly = 1;
+1741: buffer.ndim = 1
__pyx_v_buffer->ndim = 1;
+1742: buffer.format = <char*>'B' if flags & (PyBUF_FORMAT | CONTIGUOUS) else NULL
if (((__pyx_v_flags & (PyBUF_FORMAT | __pyx_v_CONTIGUOUS)) != 0)) { __pyx_t_3 = ((char *)((char *)"B")); } else { __pyx_t_3 = NULL; } __pyx_v_buffer->format = __pyx_t_3;
+1743: buffer.shape = &buffer.len if flags & (PyBUF_ND | CONTIGUOUS) else NULL
if (((__pyx_v_flags & (PyBUF_ND | __pyx_v_CONTIGUOUS)) != 0)) { __pyx_t_4 = (&__pyx_v_buffer->len); } else { __pyx_t_4 = NULL; } __pyx_v_buffer->shape = __pyx_t_4;
+1744: buffer.strides = &buffer.itemsize if flags & (PyBUF_STRIDES | CONTIGUOUS) else NULL
if (((__pyx_v_flags & (PyBUF_STRIDES | __pyx_v_CONTIGUOUS)) != 0)) { __pyx_t_4 = (&__pyx_v_buffer->itemsize); } else { __pyx_t_4 = NULL; } __pyx_v_buffer->strides = __pyx_t_4;
+1745: buffer.suboffsets = NULL
__pyx_v_buffer->suboffsets = NULL;
+1746: buffer.internal = NULL
__pyx_v_buffer->internal = NULL;
1747:
+1748: def __releasebuffer__(self, Py_buffer* buffer):
/* Python wrapper */ static CYTHON_UNUSED void __pyx_pw_10bytesparse_2_c_9BlockView_7__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_buffer); /*proto*/ static CYTHON_UNUSED void __pyx_pw_10bytesparse_2_c_9BlockView_7__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_buffer) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__releasebuffer__ (wrapper)", 0); __pyx_pf_10bytesparse_2_c_9BlockView_6__releasebuffer__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self), ((Py_buffer *)__pyx_v_buffer)); /* function exit code */ __Pyx_RefNannyFinishContext(); } static void __pyx_pf_10bytesparse_2_c_9BlockView_6__releasebuffer__(CYTHON_UNUSED struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self, CYTHON_UNUSED Py_buffer *__pyx_v_buffer) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__releasebuffer__", 0); /* function exit code */ __Pyx_RefNannyFinishContext(); }
1749: # if self._block:
1750: # self._block = Block_Release(self._block)
1751: pass
1752:
+1753: def __repr__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_9__repr__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_9__repr__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_8__repr__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_8__repr__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__repr__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.BlockView.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1754: self: 'BlockView',
1755: ) -> str:
1756:
+1757: return repr(str(self))
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyUnicode_Type)), ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1757, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = PyObject_Repr(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1757, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_2; __pyx_t_2 = 0; goto __pyx_L0;
1758:
+1759: def __str__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_11__str__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_11__str__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__str__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_10__str__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_10__str__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { Block_ const *__pyx_v_block; size_t __pyx_v_size; addr_t __pyx_v_start; addr_t __pyx_v_endex; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__str__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("bytesparse._c.BlockView.__str__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1760: self: 'BlockView',
1761: ) -> str:
1762: cdef:
+1763: const Block_* block = self._block
__pyx_t_1 = __pyx_v_self->_block; __pyx_v_block = __pyx_t_1;
+1764: size_t size = self._endex - self._start
__pyx_v_size = (__pyx_v_self->_endex - __pyx_v_self->_start);
1765: addr_t start
1766: addr_t endex
1767:
+1768: self.check_()
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *)__pyx_v_self->__pyx_vtab)->check_(__pyx_v_self); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 1768, __pyx_L1_error)
1769:
+1770: if size > STR_MAX_CONTENT_SIZE:
__pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1770, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_STR_MAX_CONTENT_SIZE); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1770, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = PyObject_RichCompare(__pyx_t_3, __pyx_t_4, Py_GT); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1770, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 1770, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; if (__pyx_t_2) { /* … */ }
+1771: start = block.address
__pyx_t_6 = __pyx_v_block->address; __pyx_v_start = __pyx_t_6;
+1772: CheckAddAddrU(start, size)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_start, __pyx_v_size); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1772, __pyx_L1_error)
+1773: endex = start + size
__pyx_v_endex = (__pyx_v_start + __pyx_v_size);
+1774: return f'<{type(self).__name__}[0x{start:X}:0x{endex:X}]@0x{<uintptr_t><void*>self:X}>'
__Pyx_XDECREF(__pyx_r); __pyx_t_5 = PyTuple_New(9); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_8 = 0; __pyx_t_9 = 127; __Pyx_INCREF(__pyx_kp_u__10); __pyx_t_8 += 1; __Pyx_GIVEREF(__pyx_kp_u__10); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_kp_u__10); __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))), __pyx_n_s_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = __Pyx_PyObject_FormatSimple(__pyx_t_4, __pyx_empty_unicode); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_9 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_3) > __pyx_t_9) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_3) : __pyx_t_9; __pyx_t_8 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_3); __pyx_t_3 = 0; __Pyx_INCREF(__pyx_kp_u_0x); __pyx_t_8 += 3; __Pyx_GIVEREF(__pyx_kp_u_0x); PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_kp_u_0x); __pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_start); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyObject_Format(__pyx_t_3, __pyx_n_u_X); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_9 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_9) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_9; __pyx_t_8 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 3, __pyx_t_4); __pyx_t_4 = 0; __Pyx_INCREF(__pyx_kp_u_0x_2); __pyx_t_8 += 3; __Pyx_GIVEREF(__pyx_kp_u_0x_2); PyTuple_SET_ITEM(__pyx_t_5, 4, __pyx_kp_u_0x_2); __pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_endex); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = __Pyx_PyObject_Format(__pyx_t_4, __pyx_n_u_X); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_9 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_3) > __pyx_t_9) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_3) : __pyx_t_9; __pyx_t_8 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 5, __pyx_t_3); __pyx_t_3 = 0; __Pyx_INCREF(__pyx_kp_u_0x_3); __pyx_t_8 += 4; __Pyx_GIVEREF(__pyx_kp_u_0x_3); PyTuple_SET_ITEM(__pyx_t_5, 6, __pyx_kp_u_0x_3); __pyx_t_3 = __Pyx_PyInt_FromSize_t(((uintptr_t)((void *)__pyx_v_self))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyObject_Format(__pyx_t_3, __pyx_n_u_X); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_9 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_9) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_9; __pyx_t_8 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 7, __pyx_t_4); __pyx_t_4 = 0; __Pyx_INCREF(__pyx_kp_u__11); __pyx_t_8 += 1; __Pyx_GIVEREF(__pyx_kp_u__11); PyTuple_SET_ITEM(__pyx_t_5, 8, __pyx_kp_u__11); __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_5, 9, __pyx_t_8, __pyx_t_9); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1774, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0;
1775:
1776: else:
+1777: return self.memview.tobytes().decode('ascii')
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_memview); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1777, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_tobytes); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 1777, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_10); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_10, function); } } __pyx_t_5 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_10); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1777, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_decode); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 1777, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_t_5 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_10); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_10, function); } } __pyx_t_4 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_10, __pyx_t_5, __pyx_n_u_ascii) : __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_n_u_ascii); __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1777, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0; }
1778:
+1779: def __bool__(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_9BlockView_13__bool__(PyObject *__pyx_v_self); /*proto*/ static int __pyx_pw_10bytesparse_2_c_9BlockView_13__bool__(PyObject *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bool__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_12__bool__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_9BlockView_12__bool__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bool__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.BlockView.__bool__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1780: self: 'BlockView',
1781: ) -> bool:
1782:
+1783: self.check_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *)__pyx_v_self->__pyx_vtab)->check_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 1783, __pyx_L1_error)
+1784: return self._start < self._endex
__pyx_r = (__pyx_v_self->_start < __pyx_v_self->_endex); goto __pyx_L0;
1785:
+1786: def __bytes__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_15__bytes__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_9BlockView_14__bytes__[] = "BlockView.__bytes__(self: u'BlockView') -> bytes"; static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_15__bytes__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bytes__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_14__bytes__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_14__bytes__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bytes__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.BlockView.__bytes__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1787: self: 'BlockView',
1788: ) -> bytes:
1789:
+1790: return bytes(self.memview)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_memview); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1790, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1790, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L0;
1791:
1792: @property
+1793: def memview(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_7memview_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_7memview_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_7memview___get__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_7memview___get__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.BlockView.memview.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1794: self: 'BlockView',
1795: ) -> memoryview:
1796:
+1797: self.check_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *)__pyx_v_self->__pyx_vtab)->check_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 1797, __pyx_L1_error)
+1798: if self._memview is None:
__pyx_t_1 = (__pyx_v_self->_memview == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ }
+1799: self._memview = memoryview(self)
__Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_memoryview); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1799, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_3, ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1799, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GIVEREF(__pyx_t_4); __Pyx_GOTREF(__pyx_v_self->_memview); __Pyx_DECREF(__pyx_v_self->_memview); __pyx_v_self->_memview = __pyx_t_4; __pyx_t_4 = 0;
+1800: return self._memview
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_self->_memview); __pyx_r = __pyx_v_self->_memview; goto __pyx_L0;
1801:
+1802: def __len__(
/* Python wrapper */ static Py_ssize_t __pyx_pw_10bytesparse_2_c_9BlockView_17__len__(PyObject *__pyx_v_self); /*proto*/ static Py_ssize_t __pyx_pw_10bytesparse_2_c_9BlockView_17__len__(PyObject *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_16__len__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static Py_ssize_t __pyx_pf_10bytesparse_2_c_9BlockView_16__len__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__len__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.BlockView.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1803: self: 'BlockView',
1804: ) -> Address:
1805:
+1806: self.check_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *)__pyx_v_self->__pyx_vtab)->check_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 1806, __pyx_L1_error)
+1807: return self._endex - self._start
__pyx_r = (__pyx_v_self->_endex - __pyx_v_self->_start); goto __pyx_L0;
1808:
+1809: def __getattr__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_19__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_attr); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_19__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_attr) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getattr__ (wrapper)", 0); if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_attr), (&PyUnicode_Type), 1, "attr", 1))) __PYX_ERR(0, 1811, __pyx_L1_error) __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_18__getattr__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self), ((PyObject*)__pyx_v_attr)); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_18__getattr__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self, PyObject *__pyx_v_attr) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getattr__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.BlockView.__getattr__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1810: self: 'BlockView',
1811: attr: str,
1812: ) -> Any:
1813:
+1814: return getattr(self.memview, attr)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_memview); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1814, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_GetAttr(__pyx_t_1, __pyx_v_attr); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1814, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_2; __pyx_t_2 = 0; goto __pyx_L0;
1815:
+1816: def __getitem__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_21__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_21__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_20__getitem__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self), ((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_20__getitem__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getitem__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.BlockView.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1817: self: 'BlockView',
1818: item: Any,
1819: ) -> Any:
1820:
+1821: self.check_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *)__pyx_v_self->__pyx_vtab)->check_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 1821, __pyx_L1_error)
+1822: return self.memview[item]
__Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_memview); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1822, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = __Pyx_PyObject_GetItem(__pyx_t_2, __pyx_v_item); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1822, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
1823:
1824: @property
+1825: def start(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_5start_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_5start_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_5start___get__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_5start___get__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.BlockView.start.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1826: self: 'BlockView',
1827: ) -> Address:
1828:
+1829: self.check()
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_check); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1829, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1829, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+1830: return self._block.address
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_block->address); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1830, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
1831:
1832: @property
+1833: def endex(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_5endex_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_5endex_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_5endex___get__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_5endex___get__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.BlockView.endex.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1834: self: 'BlockView',
1835: ) -> Address:
1836:
+1837: self.check()
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_check); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1837, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1837, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+1838: return self._block.address + self._endex - self._start
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(((__pyx_v_self->_block->address + __pyx_v_self->_endex) - __pyx_v_self->_start)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1838, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
1839:
1840: @property
+1841: def endin(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_5endin_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_5endin_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_5endin___get__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_5endin___get__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.BlockView.endin.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1842: self: 'BlockView',
1843: ) -> Address:
1844:
+1845: return self.endex - 1
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_endex); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1845, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyInt_SubtractObjC(__pyx_t_1, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1845, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_2; __pyx_t_2 = 0; goto __pyx_L0;
1846:
1847: @property
+1848: def acquired(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_8acquired_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_8acquired_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_8acquired___get__(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_8acquired___get__(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.BlockView.acquired.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1849: self: 'BlockView',
1850: ) -> bool:
1851:
+1852: return self._block != NULL
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyBool_FromLong((__pyx_v_self->_block != NULL)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1852, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
1853:
+1854: cdef bint check_(self) except -1:
static int __pyx_f_10bytesparse_2_c_9BlockView_check_(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("check_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.BlockView.check_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+1855: if self._block == NULL:
__pyx_t_1 = ((__pyx_v_self->_block == NULL) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1856: raise RuntimeError('null internal data pointer')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1856, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1856, __pyx_L1_error) /* … */ __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_u_null_internal_data_pointer); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 1856, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__12); __Pyx_GIVEREF(__pyx_tuple__12);
1857:
+1858: def check(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_23check(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_9BlockView_22check[] = "BlockView.check(self: u'BlockView') -> None"; static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_23check(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("check (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_22check(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_22check(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("check", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.BlockView.check", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1859: self: 'BlockView',
1860: ) -> None:
1861:
+1862: self.check_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_BlockView *)__pyx_v_self->__pyx_vtab)->check_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 1862, __pyx_L1_error)
1863:
+1864: def dispose(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_25dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_9BlockView_24dispose[] = "BlockView.dispose(self: u'BlockView') -> None"; static PyObject *__pyx_pw_10bytesparse_2_c_9BlockView_25dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("dispose (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_9BlockView_24dispose(((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_9BlockView_24dispose(struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("dispose", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1865: self: 'BlockView',
1866: ) -> None:
1867:
+1868: if self._block:
__pyx_t_1 = (__pyx_v_self->_block != 0); if (__pyx_t_1) { /* … */ }
+1869: self._block = Block_Release(self._block)
__pyx_v_self->_block = __pyx_f_10bytesparse_2_c_Block_Release(__pyx_v_self->_block);
1870:
1871:
1872: # =====================================================================================================================
1873:
+1874: cdef Rack_* Rack_Alloc(size_t size) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Alloc(size_t __pyx_v_size) { Rack_ *__pyx_v_that; size_t __pyx_v_allocated; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Alloc", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_Alloc", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1875: cdef:
+1876: Rack_* that = NULL
__pyx_v_that = NULL;
1877: size_t allocated
1878:
+1879: if size > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_size > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1880: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1880, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 1880, __pyx_L1_error)
1881:
1882: # Allocate as per request
+1883: allocated = Upsize(0, size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Upsize(0, __pyx_v_size);
+1884: if allocated > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1885: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 1885, __pyx_L1_error)
1886:
+1887: that = <Rack_*>PyMem_Calloc(Rack_HEADING + (allocated * sizeof(Block_*)), 1, True)
__pyx_v_that = ((Rack_ *)__pyx_f_10bytesparse_2_c_PyMem_Calloc((Rack_HEADING + (__pyx_v_allocated * (sizeof(Block_ *)))), 1, 1));
+1888: if that == NULL:
__pyx_t_1 = ((__pyx_v_that == NULL) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+1889: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 1889, __pyx_L1_error)
1890:
+1891: that.allocated = allocated
__pyx_v_that->allocated = __pyx_v_allocated;
+1892: that.start = MARGIN # leave some initial room
__pyx_v_that->start = MARGIN;
+1893: that.endex = that.start + size
__pyx_v_that->endex = (__pyx_v_that->start + __pyx_v_size);
+1894: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
1895:
1896:
+1897: cdef Rack_* Rack_Free(Rack_* that):
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Free(Rack_ *__pyx_v_that) { size_t __pyx_v_index; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Free", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1898: cdef:
1899: size_t index
1900:
+1901: if that:
__pyx_t_1 = (__pyx_v_that != 0); if (__pyx_t_1) { /* … */ }
1902: # Decrement data referencing
+1903: for index in range(that.start, that.endex):
__pyx_t_2 = __pyx_v_that->endex; __pyx_t_3 = __pyx_t_2; for (__pyx_t_4 = __pyx_v_that->start; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) { __pyx_v_index = __pyx_t_4;
+1904: that.blocks[index] = Block_Release(that.blocks[index])
(__pyx_v_that->blocks[__pyx_v_index]) = __pyx_f_10bytesparse_2_c_Block_Release((__pyx_v_that->blocks[__pyx_v_index])); }
+1905: PyMem_Free(that)
PyMem_Free(__pyx_v_that);
+1906: return NULL
__pyx_r = NULL; goto __pyx_L0;
1907:
1908:
+1909: cdef Rack_* Rack_ShallowCopy(const Rack_* other) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_ShallowCopy(Rack_ const *__pyx_v_other) { Rack_ *__pyx_v_that; size_t __pyx_v_start1; size_t __pyx_v_start2; size_t __pyx_v_offset; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_ShallowCopy", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_AddTraceback("bytesparse._c.Rack_ShallowCopy", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1910: cdef:
+1911: Rack_* that = Rack_Alloc(other.endex - other.start)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Alloc((__pyx_v_other->endex - __pyx_v_other->start)); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 1911, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+1912: size_t start1 = that.start
__pyx_t_2 = __pyx_v_that->start; __pyx_v_start1 = __pyx_t_2;
+1913: size_t start2 = other.start
__pyx_t_2 = __pyx_v_other->start; __pyx_v_start2 = __pyx_t_2;
1914: size_t offset
1915:
+1916: try:
{ /*try:*/ { /* … */ } __pyx_L3_error:; /* … */ __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); goto __pyx_L1_error; __pyx_L7_try_return:; __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); goto __pyx_L0; }
+1917: for offset in range(that.endex - that.start):
__pyx_t_2 = (__pyx_v_that->endex - __pyx_v_that->start); __pyx_t_6 = __pyx_t_2; for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_offset = __pyx_t_7;
+1918: that.blocks[start1 + offset] = Block_Acquire(other.blocks[start2 + offset])
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_other->blocks[(__pyx_v_start2 + __pyx_v_offset)])); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 1918, __pyx_L3_error)
(__pyx_v_that->blocks[(__pyx_v_start1 + __pyx_v_offset)]) = __pyx_t_8;
}
+1919: return that
__pyx_r = __pyx_v_that; goto __pyx_L7_try_return;
1920:
+1921: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Rack_ShallowCopy", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11) < 0) __PYX_ERR(0, 1921, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_11);
+1922: that = Rack_Free(that)
__pyx_v_that = __pyx_f_10bytesparse_2_c_Rack_Free(__pyx_v_that);
+1923: raise
__Pyx_GIVEREF(__pyx_t_9); __Pyx_GIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_11); __Pyx_ErrRestoreWithState(__pyx_t_9, __pyx_t_10, __pyx_t_11); __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __PYX_ERR(0, 1923, __pyx_L5_except_error) } __pyx_L5_except_error:;
1924:
1925:
+1926: cdef Rack_* Rack_Copy(const Rack_* other) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Copy(Rack_ const *__pyx_v_other) { Rack_ *__pyx_v_that; size_t __pyx_v_start1; size_t __pyx_v_start2; size_t __pyx_v_offset; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Copy", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_AddTraceback("bytesparse._c.Rack_Copy", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
1927: cdef:
+1928: Rack_* that = Rack_Alloc(other.endex - other.start)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Alloc((__pyx_v_other->endex - __pyx_v_other->start)); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 1928, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+1929: size_t start1 = that.start
__pyx_t_2 = __pyx_v_that->start; __pyx_v_start1 = __pyx_t_2;
+1930: size_t start2 = other.start
__pyx_t_2 = __pyx_v_other->start; __pyx_v_start2 = __pyx_t_2;
1931: size_t offset
1932:
+1933: try:
{ /*try:*/ { /* … */ } __pyx_L3_error:; /* … */ __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); goto __pyx_L1_error; __pyx_L7_try_return:; __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); goto __pyx_L0; }
+1934: for offset in range(that.endex - that.start):
__pyx_t_2 = (__pyx_v_that->endex - __pyx_v_that->start); __pyx_t_6 = __pyx_t_2; for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_offset = __pyx_t_7;
+1935: that.blocks[start1 + offset] = Block_Copy(other.blocks[start2 + offset])
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_Copy((__pyx_v_other->blocks[(__pyx_v_start2 + __pyx_v_offset)])); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 1935, __pyx_L3_error)
(__pyx_v_that->blocks[(__pyx_v_start1 + __pyx_v_offset)]) = __pyx_t_8;
}
+1936: return that
__pyx_r = __pyx_v_that; goto __pyx_L7_try_return;
1937:
+1938: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Rack_Copy", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11) < 0) __PYX_ERR(0, 1938, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_11);
+1939: that = Rack_Free(that)
__pyx_v_that = __pyx_f_10bytesparse_2_c_Rack_Free(__pyx_v_that);
+1940: raise
__Pyx_GIVEREF(__pyx_t_9); __Pyx_GIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_11); __Pyx_ErrRestoreWithState(__pyx_t_9, __pyx_t_10, __pyx_t_11); __pyx_t_9 = 0; __pyx_t_10 = 0; __pyx_t_11 = 0; __PYX_ERR(0, 1940, __pyx_L5_except_error) } __pyx_L5_except_error:;
1941:
1942:
+1943: cdef Rack_* Rack_FromObject(object obj, saddr_t offset) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_FromObject(PyObject *__pyx_v_obj, saddr_t __pyx_v_offset) { Rack_ *__pyx_v_that; size_t __pyx_v_size; size_t __pyx_v_index; addr_t __pyx_v_address; PyObject *__pyx_v_data = NULL; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_FromObject", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_13); __Pyx_XDECREF(__pyx_t_14); __Pyx_XDECREF(__pyx_t_15); __Pyx_XDECREF(__pyx_t_22); __Pyx_XDECREF(__pyx_t_23); __Pyx_XDECREF(__pyx_t_24); __Pyx_AddTraceback("bytesparse._c.Rack_FromObject", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_data); __Pyx_RefNannyFinishContext(); return __pyx_r; }
1944: cdef:
+1945: Rack_* that = NULL
__pyx_v_that = NULL;
1946: size_t size
1947: size_t index
1948: addr_t address
1949:
+1950: try:
{ /*try:*/ { /* … */ } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; __Pyx_XDECREF(__pyx_t_22); __pyx_t_22 = 0; __Pyx_XDECREF(__pyx_t_23); __pyx_t_23 = 0; __Pyx_XDECREF(__pyx_t_24); __pyx_t_24 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L1_error; __pyx_L7_try_return:; __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L0; }
+1951: try:
{ /*try:*/ { /* … */ } /* … */ __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); goto __pyx_L3_error; __pyx_L10_exception_handled:; __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); __pyx_L14_try_end:; }
+1952: size = len(obj)
__pyx_t_7 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_7 == ((Py_ssize_t)-1))) __PYX_ERR(0, 1952, __pyx_L9_error) __pyx_v_size = __pyx_t_7;
+1953: except TypeError:
__pyx_t_21 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_TypeError); if (__pyx_t_21) { __Pyx_AddTraceback("bytesparse._c.Rack_FromObject", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_12, &__pyx_t_14) < 0) __PYX_ERR(0, 1953, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_12); __Pyx_GOTREF(__pyx_t_14);
+1954: that = Rack_Alloc(0)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Alloc(0); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 1954, __pyx_L11_except_error)
__pyx_v_that = __pyx_t_8;
+1955: for address, data in obj:
if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_13 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_13); __pyx_t_7 = 0; __pyx_t_11 = NULL; } else { __pyx_t_7 = -1; __pyx_t_13 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 1955, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_13); __pyx_t_11 = Py_TYPE(__pyx_t_13)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 1955, __pyx_L11_except_error) } for (;;) { if (likely(!__pyx_t_11)) { if (likely(PyList_CheckExact(__pyx_t_13))) { if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_13)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_15 = PyList_GET_ITEM(__pyx_t_13, __pyx_t_7); __Pyx_INCREF(__pyx_t_15); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 1955, __pyx_L11_except_error) #else __pyx_t_15 = PySequence_ITEM(__pyx_t_13, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 1955, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_15); #endif } else { if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_13)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_15 = PyTuple_GET_ITEM(__pyx_t_13, __pyx_t_7); __Pyx_INCREF(__pyx_t_15); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 1955, __pyx_L11_except_error) #else __pyx_t_15 = PySequence_ITEM(__pyx_t_13, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 1955, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_15); #endif } } else { __pyx_t_15 = __pyx_t_11(__pyx_t_13); if (unlikely(!__pyx_t_15)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); else __PYX_ERR(0, 1955, __pyx_L11_except_error) } break; } __Pyx_GOTREF(__pyx_t_15); } if ((likely(PyTuple_CheckExact(__pyx_t_15))) || (PyList_CheckExact(__pyx_t_15))) { PyObject* sequence = __pyx_t_15; Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); if (unlikely(size != 2)) { if (size > 2) __Pyx_RaiseTooManyValuesError(2); else if 
(size >= 0) __Pyx_RaiseNeedMoreValuesError(size); __PYX_ERR(0, 1955, __pyx_L11_except_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { __pyx_t_22 = PyTuple_GET_ITEM(sequence, 0); __pyx_t_23 = PyTuple_GET_ITEM(sequence, 1); } else { __pyx_t_22 = PyList_GET_ITEM(sequence, 0); __pyx_t_23 = PyList_GET_ITEM(sequence, 1); } __Pyx_INCREF(__pyx_t_22); __Pyx_INCREF(__pyx_t_23); #else __pyx_t_22 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_22)) __PYX_ERR(0, 1955, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_22); __pyx_t_23 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_23)) __PYX_ERR(0, 1955, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_23); #endif __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; } else { Py_ssize_t index = -1; __pyx_t_24 = PyObject_GetIter(__pyx_t_15); if (unlikely(!__pyx_t_24)) __PYX_ERR(0, 1955, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_24); __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; __pyx_t_16 = Py_TYPE(__pyx_t_24)->tp_iternext; index = 0; __pyx_t_22 = __pyx_t_16(__pyx_t_24); if (unlikely(!__pyx_t_22)) goto __pyx_L24_unpacking_failed; __Pyx_GOTREF(__pyx_t_22); index = 1; __pyx_t_23 = __pyx_t_16(__pyx_t_24); if (unlikely(!__pyx_t_23)) goto __pyx_L24_unpacking_failed; __Pyx_GOTREF(__pyx_t_23); if (__Pyx_IternextUnpackEndCheck(__pyx_t_16(__pyx_t_24), 2) < 0) __PYX_ERR(0, 1955, __pyx_L11_except_error) __pyx_t_16 = NULL; __Pyx_DECREF(__pyx_t_24); __pyx_t_24 = 0; goto __pyx_L25_unpacking_done; __pyx_L24_unpacking_failed:; __Pyx_DECREF(__pyx_t_24); __pyx_t_24 = 0; __pyx_t_16 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); __PYX_ERR(0, 1955, __pyx_L11_except_error) __pyx_L25_unpacking_done:; } __pyx_t_17 = __Pyx_PyInt_As_uint_fast64_t(__pyx_t_22); if (unlikely((__pyx_t_17 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 1955, __pyx_L11_except_error) __Pyx_DECREF(__pyx_t_22); __pyx_t_22 = 0; __pyx_v_address = __pyx_t_17; __Pyx_XDECREF_SET(__pyx_v_data, 
__pyx_t_23); __pyx_t_23 = 0; /* … */ } __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; goto __pyx_L10_exception_handled; } goto __pyx_L11_except_error; __pyx_L11_except_error:;
+1956: if offset < 0:
__pyx_t_18 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_18) { /* … */ goto __pyx_L26; }
+1957: CheckSubAddrU(address, <addr_t>-offset)
__pyx_t_19 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_address, ((addr_t)(-__pyx_v_offset))); if (unlikely(__pyx_t_19 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1957, __pyx_L11_except_error)
+1958: address -= <addr_t>-offset
__pyx_v_address = (__pyx_v_address - ((addr_t)(-__pyx_v_offset)));
+1959: elif offset > 0:
__pyx_t_18 = ((__pyx_v_offset > 0) != 0); if (__pyx_t_18) { /* … */ } __pyx_L26:;
+1960: CheckAddAddrU(address, <addr_t>offset)
__pyx_t_19 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_address, ((addr_t)__pyx_v_offset)); if (unlikely(__pyx_t_19 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1960, __pyx_L11_except_error)
+1961: address += <addr_t>offset
__pyx_v_address = (__pyx_v_address + ((addr_t)__pyx_v_offset));
+1962: that = Rack_Append(that, Block_FromObject(address, data, True))
__pyx_t_20 = __pyx_f_10bytesparse_2_c_Block_FromObject(__pyx_v_address, __pyx_v_data, 1); if (unlikely(__pyx_t_20 == ((Block_ *)NULL))) __PYX_ERR(0, 1962, __pyx_L11_except_error) __pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Append(__pyx_v_that, __pyx_t_20); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 1962, __pyx_L11_except_error) __pyx_v_that = __pyx_t_8;
1963: else:
+1964: that = Rack_Alloc(size)
/*else:*/ {
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Alloc(__pyx_v_size); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 1964, __pyx_L11_except_error)
__pyx_v_that = __pyx_t_8;
+1965: index = that.start
__pyx_t_9 = __pyx_v_that->start; __pyx_v_index = __pyx_t_9;
+1966: for address, data in obj:
if (likely(PyList_CheckExact(__pyx_v_obj)) || PyTuple_CheckExact(__pyx_v_obj)) { __pyx_t_10 = __pyx_v_obj; __Pyx_INCREF(__pyx_t_10); __pyx_t_7 = 0; __pyx_t_11 = NULL; } else { __pyx_t_7 = -1; __pyx_t_10 = PyObject_GetIter(__pyx_v_obj); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 1966, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_10); __pyx_t_11 = Py_TYPE(__pyx_t_10)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 1966, __pyx_L11_except_error) } for (;;) { if (likely(!__pyx_t_11)) { if (likely(PyList_CheckExact(__pyx_t_10))) { if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_10)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_12 = PyList_GET_ITEM(__pyx_t_10, __pyx_t_7); __Pyx_INCREF(__pyx_t_12); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 1966, __pyx_L11_except_error) #else __pyx_t_12 = PySequence_ITEM(__pyx_t_10, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 1966, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_12); #endif } else { if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_10)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_12 = PyTuple_GET_ITEM(__pyx_t_10, __pyx_t_7); __Pyx_INCREF(__pyx_t_12); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 1966, __pyx_L11_except_error) #else __pyx_t_12 = PySequence_ITEM(__pyx_t_10, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 1966, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_12); #endif } } else { __pyx_t_12 = __pyx_t_11(__pyx_t_10); if (unlikely(!__pyx_t_12)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); else __PYX_ERR(0, 1966, __pyx_L11_except_error) } break; } __Pyx_GOTREF(__pyx_t_12); } if ((likely(PyTuple_CheckExact(__pyx_t_12))) || (PyList_CheckExact(__pyx_t_12))) { PyObject* sequence = __pyx_t_12; Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); if (unlikely(size != 2)) { if (size > 2) __Pyx_RaiseTooManyValuesError(2); else if 
(size >= 0) __Pyx_RaiseNeedMoreValuesError(size); __PYX_ERR(0, 1966, __pyx_L11_except_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { __pyx_t_13 = PyTuple_GET_ITEM(sequence, 0); __pyx_t_14 = PyTuple_GET_ITEM(sequence, 1); } else { __pyx_t_13 = PyList_GET_ITEM(sequence, 0); __pyx_t_14 = PyList_GET_ITEM(sequence, 1); } __Pyx_INCREF(__pyx_t_13); __Pyx_INCREF(__pyx_t_14); #else __pyx_t_13 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 1966, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_13); __pyx_t_14 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 1966, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_14); #endif __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } else { Py_ssize_t index = -1; __pyx_t_15 = PyObject_GetIter(__pyx_t_12); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 1966, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_15); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; __pyx_t_16 = Py_TYPE(__pyx_t_15)->tp_iternext; index = 0; __pyx_t_13 = __pyx_t_16(__pyx_t_15); if (unlikely(!__pyx_t_13)) goto __pyx_L17_unpacking_failed; __Pyx_GOTREF(__pyx_t_13); index = 1; __pyx_t_14 = __pyx_t_16(__pyx_t_15); if (unlikely(!__pyx_t_14)) goto __pyx_L17_unpacking_failed; __Pyx_GOTREF(__pyx_t_14); if (__Pyx_IternextUnpackEndCheck(__pyx_t_16(__pyx_t_15), 2) < 0) __PYX_ERR(0, 1966, __pyx_L11_except_error) __pyx_t_16 = NULL; __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; goto __pyx_L18_unpacking_done; __pyx_L17_unpacking_failed:; __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; __pyx_t_16 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); __PYX_ERR(0, 1966, __pyx_L11_except_error) __pyx_L18_unpacking_done:; } __pyx_t_17 = __Pyx_PyInt_As_uint_fast64_t(__pyx_t_13); if (unlikely((__pyx_t_17 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 1966, __pyx_L11_except_error) __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __pyx_v_address = __pyx_t_17; __Pyx_XDECREF_SET(__pyx_v_data, 
__pyx_t_14); __pyx_t_14 = 0; /* … */ } __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L14_try_end; __pyx_L9_error:;
+1967: if offset < 0:
__pyx_t_18 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_18) { /* … */ goto __pyx_L19; }
+1968: CheckSubAddrU(address, <addr_t>-offset)
__pyx_t_19 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_address, ((addr_t)(-__pyx_v_offset))); if (unlikely(__pyx_t_19 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1968, __pyx_L11_except_error)
+1969: address -= <addr_t>-offset
__pyx_v_address = (__pyx_v_address - ((addr_t)(-__pyx_v_offset)));
+1970: elif offset > 0:
__pyx_t_18 = ((__pyx_v_offset > 0) != 0); if (__pyx_t_18) { /* … */ } __pyx_L19:;
+1971: CheckAddAddrU(address, <addr_t>offset)
__pyx_t_19 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_address, ((addr_t)__pyx_v_offset)); if (unlikely(__pyx_t_19 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 1971, __pyx_L11_except_error)
+1972: address += <addr_t>offset
__pyx_v_address = (__pyx_v_address + ((addr_t)__pyx_v_offset));
+1973: that.blocks[index] = Block_FromObject(address, data, True)
__pyx_t_20 = __pyx_f_10bytesparse_2_c_Block_FromObject(__pyx_v_address, __pyx_v_data, 1); if (unlikely(__pyx_t_20 == ((Block_ *)NULL))) __PYX_ERR(0, 1973, __pyx_L11_except_error)
(__pyx_v_that->blocks[__pyx_v_index]) = __pyx_t_20;
+1974: index += 1
__pyx_v_index = (__pyx_v_index + 1);
+1975: return that
__pyx_r = __pyx_v_that; goto __pyx_L7_try_return;
1976:
+1977: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Rack_FromObject", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_14, &__pyx_t_12, &__pyx_t_10) < 0) __PYX_ERR(0, 1977, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_14); __Pyx_GOTREF(__pyx_t_12); __Pyx_GOTREF(__pyx_t_10);
+1978: that = Rack_Free(that)
__pyx_v_that = __pyx_f_10bytesparse_2_c_Rack_Free(__pyx_v_that);
+1979: raise
__Pyx_GIVEREF(__pyx_t_14); __Pyx_GIVEREF(__pyx_t_12); __Pyx_XGIVEREF(__pyx_t_10); __Pyx_ErrRestoreWithState(__pyx_t_14, __pyx_t_12, __pyx_t_10); __pyx_t_14 = 0; __pyx_t_12 = 0; __pyx_t_10 = 0; __PYX_ERR(0, 1979, __pyx_L5_except_error) } __pyx_L5_except_error:;
1980:
1981:
+1982: cdef size_t Rack_Length(const Rack_* that) nogil:
static size_t __pyx_f_10bytesparse_2_c_Rack_Length(Rack_ const *__pyx_v_that) { size_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+1983: return that.endex - that.start
__pyx_r = (__pyx_v_that->endex - __pyx_v_that->start); goto __pyx_L0;
1984:
1985:
+1986: cdef (addr_t, addr_t) Rack_BoundSlice(const Rack_* that, addr_t start, addr_t endex) nogil:
static __pyx_ctuple_addr_t__and_addr_t __pyx_f_10bytesparse_2_c_Rack_BoundSlice(Rack_ const *__pyx_v_that, addr_t __pyx_v_start, addr_t __pyx_v_endex) { Block_ const *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; __pyx_ctuple_addr_t__and_addr_t __pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
1987: cdef:
1988: const Block_* block
1989: addr_t block_start
1990: addr_t block_endex
1991:
+1992: if that.start < that.endex:
__pyx_t_1 = ((__pyx_v_that->start < __pyx_v_that->endex) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+1993: block = that.blocks[that.start]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_that->start]);
+1994: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+1995: if start < block_start:
__pyx_t_1 = ((__pyx_v_start < __pyx_v_block_start) != 0); if (__pyx_t_1) { /* … */ }
+1996: start = block_start
__pyx_v_start = __pyx_v_block_start;
+1997: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+1998: endex = start
__pyx_v_endex = __pyx_v_start;
1999:
+2000: block = that.blocks[that.endex - 1]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]);
+2001: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+2002: if endex > block_endex:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_block_endex) != 0); if (__pyx_t_1) { /* … */ }
+2003: endex = block_endex
__pyx_v_endex = __pyx_v_block_endex;
+2004: if start > endex:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_endex) != 0); if (__pyx_t_1) { /* … */ }
+2005: start = endex
__pyx_v_start = __pyx_v_endex;
2006: else:
+2007: start = 0
/*else*/ { __pyx_v_start = 0;
+2008: endex = 0
__pyx_v_endex = 0; } __pyx_L3:;
2009:
+2010: return start, endex
__pyx_t_2.f0 = __pyx_v_start; __pyx_t_2.f1 = __pyx_v_endex; __pyx_r = __pyx_t_2; goto __pyx_L0;
2011:
2012:
+2013: cdef Rack_* Rack_Shift_(Rack_* that, addr_t offset) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Shift_(Rack_ *__pyx_v_that, addr_t __pyx_v_offset) { size_t __pyx_v_index; Block_ *__pyx_v_block; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Shift_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Shift_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2014: cdef:
2015: size_t index
2016: Block_* block
2017:
+2018: if offset:
__pyx_t_1 = (__pyx_v_offset != 0); if (__pyx_t_1) { /* … */ }
+2019: if that.start < that.endex:
__pyx_t_1 = ((__pyx_v_that->start < __pyx_v_that->endex) != 0); if (__pyx_t_1) { /* … */ }
+2020: block = that.blocks[that.endex - 1]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]);
+2021: CheckAddAddrU(block.address, offset)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block->address, __pyx_v_offset); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2021, __pyx_L1_error)
2022:
+2023: for index in range(that.start, that.endex):
__pyx_t_3 = __pyx_v_that->endex; __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = __pyx_v_that->start; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_index = __pyx_t_5;
+2024: block = that.blocks[index]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_index]);
+2025: block.address += offset
__pyx_v_block->address = (__pyx_v_block->address + __pyx_v_offset); }
+2026: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2027:
2028:
+2029: cdef Rack_* Rack_Shift(Rack_* that, saddr_t offset) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Shift(Rack_ *__pyx_v_that, saddr_t __pyx_v_offset) { size_t __pyx_v_index; Block_ *__pyx_v_block; addr_t __pyx_v_offset_; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Shift", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Shift", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2030: cdef:
2031: size_t index
2032: Block_* block
2033: addr_t offset_
2034:
+2035: if offset:
__pyx_t_1 = (__pyx_v_offset != 0); if (__pyx_t_1) { /* … */ }
+2036: if that.start < that.endex:
__pyx_t_1 = ((__pyx_v_that->start < __pyx_v_that->endex) != 0); if (__pyx_t_1) { /* … */ }
+2037: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L5; }
+2038: block = that.blocks[that.start]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_that->start]);
+2039: offset_ = <addr_t>-offset
__pyx_v_offset_ = ((addr_t)(-__pyx_v_offset));
+2040: CheckSubAddrU(block.address, offset_)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_block->address, __pyx_v_offset_); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2040, __pyx_L1_error)
2041:
+2042: for index in range(that.start, that.endex):
__pyx_t_3 = __pyx_v_that->endex; __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = __pyx_v_that->start; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_index = __pyx_t_5;
+2043: block = that.blocks[index]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_index]);
+2044: block.address -= offset_
__pyx_v_block->address = (__pyx_v_block->address - __pyx_v_offset_); }
2045: else:
+2046: block = that.blocks[that.endex - 1]
/*else*/ { __pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]);
+2047: offset_ = <addr_t>offset
__pyx_v_offset_ = ((addr_t)__pyx_v_offset);
+2048: CheckAddAddrU(block.address, offset_)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block->address, __pyx_v_offset_); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2048, __pyx_L1_error)
2049:
+2050: for index in range(that.start, that.endex):
__pyx_t_3 = __pyx_v_that->endex; __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = __pyx_v_that->start; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_index = __pyx_t_5;
+2051: block = that.blocks[index]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_index]);
+2052: block.address += offset_
__pyx_v_block->address = (__pyx_v_block->address + __pyx_v_offset_); } } __pyx_L5:;
+2053: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2054:
2055:
+2056: cdef bint Rack_Eq(const Rack_* that, const Rack_* other) except -1:
static int __pyx_f_10bytesparse_2_c_Rack_Eq(Rack_ const *__pyx_v_that, Rack_ const *__pyx_v_other) { size_t __pyx_v_block_count; size_t __pyx_v_block_index; CYTHON_UNUSED size_t __pyx_v_block_length; Block_ const *__pyx_v_block1; Block_ const *__pyx_v_block2; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Eq", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2057: cdef:
+2058: size_t block_count = that.endex - that.start
__pyx_v_block_count = (__pyx_v_that->endex - __pyx_v_that->start);
2059: size_t block_index
2060: size_t block_length
2061: const Block_* block1
2062: const Block_* block2
2063:
+2064: if block_count != other.endex - other.start:
__pyx_t_1 = ((__pyx_v_block_count != (__pyx_v_other->endex - __pyx_v_other->start)) != 0); if (__pyx_t_1) { /* … */ }
+2065: return False
__pyx_r = 0; goto __pyx_L0;
2066:
+2067: for block_index in range(block_count):
__pyx_t_2 = __pyx_v_block_count; __pyx_t_3 = __pyx_t_2; for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) { __pyx_v_block_index = __pyx_t_4;
+2068: block1 = Rack_Get__(that, block_index)
__pyx_v_block1 = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_that, __pyx_v_block_index);
+2069: block2 = Rack_Get__(other, block_index)
__pyx_v_block2 = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_other, __pyx_v_block_index);
+2070: block_length = Block_Length(block1)
__pyx_v_block_length = __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block1);
2071:
+2072: if block1.address != block2.address:
__pyx_t_1 = ((__pyx_v_block1->address != __pyx_v_block2->address) != 0); if (__pyx_t_1) { /* … */ }
+2073: return False
__pyx_r = 0; goto __pyx_L0;
2074:
+2075: if not Block_Eq(block1, block2):
__pyx_t_1 = ((!(__pyx_f_10bytesparse_2_c_Block_Eq(__pyx_v_block1, __pyx_v_block2) != 0)) != 0); if (__pyx_t_1) { /* … */ } }
+2076: return False
__pyx_r = 0; goto __pyx_L0;
2077:
+2078: return True
__pyx_r = 1; goto __pyx_L0;
2079:
2080:
+2081: cdef Rack_* Rack_Reserve_(Rack_* that, size_t offset, size_t size) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Reserve_(Rack_ *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size) { size_t __pyx_v_used; size_t __pyx_v_allocated; Rack_ *__pyx_v_ptr; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Reserve_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Rack_Reserve_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2082: cdef:
2083: size_t used
2084: size_t margin
2085: size_t allocated
2086: Rack_* ptr
2087: size_t index
2088: Block_* node
2089:
+2090: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+2091: if ((size > SIZE_HMAX or
__pyx_t_2 = ((__pyx_v_size > SIZE_HMAX) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L5_bool_binop_done; } /* … */ if (unlikely(__pyx_t_1)) { /* … */ }
+2092: CannotAddSizeU(that.endex, size) or
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_CannotAddSizeU(__pyx_v_that->endex, __pyx_v_size) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L5_bool_binop_done; }
+2093: that.endex + size > SIZE_HMAX)):
__pyx_t_2 = (((__pyx_v_that->endex + __pyx_v_size) > SIZE_HMAX) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L5_bool_binop_done:;
+2094: raise OverflowError('size overflow')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2094, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 2094, __pyx_L1_error)
2095:
+2096: used = that.endex - that.start
__pyx_v_used = (__pyx_v_that->endex - __pyx_v_that->start);
+2097: if offset > used:
__pyx_t_1 = ((__pyx_v_offset > __pyx_v_used) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2098: raise IndexError('index out of range')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2098, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 2098, __pyx_L1_error)
2099:
2100: # Prefer the side where there is less data to shift, which also favors the extremes
+2101: if offset >= (used >> 1):
__pyx_t_1 = ((__pyx_v_offset >= (__pyx_v_used >> 1)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L9; }
+2102: if size > that.allocated - that.endex:
__pyx_t_1 = ((__pyx_v_size > (__pyx_v_that->allocated - __pyx_v_that->endex)) != 0); if (__pyx_t_1) { /* … */ }
2103: # Calculate the upsized allocation
+2104: allocated = Upsize(that.allocated, that.allocated + size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Upsize(__pyx_v_that->allocated, (__pyx_v_that->allocated + __pyx_v_size));
+2105: if allocated > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2106: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 2106, __pyx_L1_error)
2107:
2108: # Reallocate, including the header
+2109: ptr = <Rack_*>PyMem_Realloc(that, Rack_HEADING + (allocated * sizeof(Block_*)))
__pyx_v_ptr = ((Rack_ *)PyMem_Realloc(__pyx_v_that, (Rack_HEADING + (__pyx_v_allocated * (sizeof(Block_ *))))));
+2110: if ptr == NULL:
__pyx_t_1 = ((__pyx_v_ptr == NULL) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2111: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 2111, __pyx_L1_error)
2112:
2113: # Reassign to that
+2114: that = ptr
__pyx_v_that = __pyx_v_ptr;
+2115: that.allocated = allocated # update
__pyx_v_that->allocated = __pyx_v_allocated;
2116:
2117: # Shift elements to make room for reservation at the requested offset
+2118: CheckAddSizeU(offset, that.start)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_offset, __pyx_v_that->start); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2118, __pyx_L1_error)
+2119: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+2120: used = that.endex - offset
__pyx_v_used = (__pyx_v_that->endex - __pyx_v_offset);
+2121: if used:
__pyx_t_1 = (__pyx_v_used != 0); if (__pyx_t_1) { /* … */ }
+2122: memmove(&that.blocks[offset + size],
(void)(memmove((&(__pyx_v_that->blocks[(__pyx_v_offset + __pyx_v_size)])), (&(__pyx_v_that->blocks[__pyx_v_offset])), (__pyx_v_used * (sizeof(Block_ *)))));
2123: &that.blocks[offset],
2124: used * sizeof(Block_*))
2125:
+2126: memset(&that.blocks[offset], 0, size * sizeof(Block_*)) # pad with zeros
(void)(memset((&(__pyx_v_that->blocks[__pyx_v_offset])), 0, (__pyx_v_size * (sizeof(Block_ *)))));
+2127: that.endex += size
__pyx_v_that->endex = (__pyx_v_that->endex + __pyx_v_size);
2128:
2129: else:
+2130: if size <= that.start:
/*else*/ { __pyx_t_1 = ((__pyx_v_size <= __pyx_v_that->start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L14; }
2131: # Shift elements to make room for reservation at the requested offset
+2132: that.start -= size
__pyx_v_that->start = (__pyx_v_that->start - __pyx_v_size);
+2133: if offset:
__pyx_t_1 = (__pyx_v_offset != 0); if (__pyx_t_1) { /* … */ }
+2134: memmove(&that.blocks[that.start],
(void)(memmove((&(__pyx_v_that->blocks[__pyx_v_that->start])), (&(__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_size)])), (__pyx_v_offset * (sizeof(Block_ *)))));
2135: &that.blocks[that.start + size],
2136: offset * sizeof(Block_*))
2137:
+2138: memset(&that.blocks[that.start + offset], 0, size * sizeof(Block_*)) # pad with zeros
(void)(memset((&(__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_offset)])), 0, (__pyx_v_size * (sizeof(Block_ *)))));
2139:
2140: else:
2141: # Calculate the upsized allocation
+2142: CheckAddSizeU(that.allocated, size)
/*else*/ {
__pyx_t_4 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->allocated, __pyx_v_size); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2142, __pyx_L1_error)
+2143: allocated = Upsize(that.allocated, that.allocated + size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Upsize(__pyx_v_that->allocated, (__pyx_v_that->allocated + __pyx_v_size));
+2144: if allocated > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2145: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 2145, __pyx_L1_error)
2146:
2147: # Allocate a new chunk, including the header
+2148: ptr = <Rack_*>PyMem_Calloc(Rack_HEADING + (allocated * sizeof(Block_*)), 1, True)
__pyx_v_ptr = ((Rack_ *)__pyx_f_10bytesparse_2_c_PyMem_Calloc((Rack_HEADING + (__pyx_v_allocated * (sizeof(Block_ *)))), 1, 1));
+2149: if ptr == NULL:
__pyx_t_1 = ((__pyx_v_ptr == NULL) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2150: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 2150, __pyx_L1_error)
2151:
2152: # Prepare the new chunk aligning towards the end
+2153: ptr.allocated = allocated
__pyx_v_ptr->allocated = __pyx_v_allocated;
+2154: ptr.endex = ptr.allocated - MARGIN # leave some room
__pyx_v_ptr->endex = (__pyx_v_ptr->allocated - MARGIN);
+2155: ptr.start = ptr.endex - used - size
__pyx_v_ptr->start = ((__pyx_v_ptr->endex - __pyx_v_used) - __pyx_v_size);
2156:
2157: # Shift/copy elements to make room for reservation at the requested offset
+2158: if offset:
__pyx_t_1 = (__pyx_v_offset != 0); if (__pyx_t_1) { /* … */ }
+2159: used -= offset # prepare for later
__pyx_v_used = (__pyx_v_used - __pyx_v_offset);
+2160: memcpy(&ptr.blocks[ptr.start],
(void)(memcpy((&(__pyx_v_ptr->blocks[__pyx_v_ptr->start])), (&(__pyx_v_that->blocks[__pyx_v_that->start])), (__pyx_v_offset * (sizeof(Block_ *)))));
2161: &that.blocks[that.start],
2162: offset * sizeof(Block_*))
+2163: if used:
__pyx_t_1 = (__pyx_v_used != 0); if (__pyx_t_1) { /* … */ }
+2164: memcpy(&ptr.blocks[ptr.start + offset + size],
(void)(memcpy((&(__pyx_v_ptr->blocks[((__pyx_v_ptr->start + __pyx_v_offset) + __pyx_v_size)])), (&(__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_offset)])), (__pyx_v_used * (sizeof(Block_ *)))));
2165: &that.blocks[that.start + offset],
2166: used * sizeof(Block_*))
2167:
2168: # Reassign to that
+2169: PyMem_Free(that)
PyMem_Free(__pyx_v_that);
+2170: that = ptr
__pyx_v_that = __pyx_v_ptr; } __pyx_L14:; } __pyx_L9:;
2171:
+2172: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2173:
2174:
+2175: cdef Rack_* Rack_Delete_(Rack_* that, size_t offset, size_t size) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Delete_(Rack_ *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size) { size_t __pyx_v_allocated; Rack_ *__pyx_v_ptr; size_t __pyx_v_index; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Delete_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Rack_Delete_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2176: cdef:
2177: size_t allocated
2178: Rack_* ptr
2179: size_t index
2180: Block_* node
2181:
+2182: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+2183: if ((size > SIZE_HMAX or
__pyx_t_2 = ((__pyx_v_size > SIZE_HMAX) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L5_bool_binop_done; } /* … */ if (unlikely(__pyx_t_1)) { /* … */ }
+2184: CannotAddSizeU(offset, size) or
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_CannotAddSizeU(__pyx_v_offset, __pyx_v_size) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L5_bool_binop_done; }
+2185: offset + size > SIZE_HMAX or
__pyx_t_2 = (((__pyx_v_offset + __pyx_v_size) > SIZE_HMAX) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L5_bool_binop_done; }
+2186: CannotAddSizeU(offset, that.start) or
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_CannotAddSizeU(__pyx_v_offset, __pyx_v_that->start) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L5_bool_binop_done; }
+2187: that.start > SIZE_HMAX)):
__pyx_t_2 = ((__pyx_v_that->start > SIZE_HMAX) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L5_bool_binop_done:;
+2188: raise OverflowError('size overflow')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2188, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 2188, __pyx_L1_error)
2189:
+2190: if that.endex < that.start + offset + size:
__pyx_t_1 = ((__pyx_v_that->endex < ((__pyx_v_that->start + __pyx_v_offset) + __pyx_v_size)) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2191: raise IndexError('index out of range')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2191, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 2191, __pyx_L1_error)
2192:
2193: # Calculate the downsized allocation
+2194: allocated = Downsize(that.allocated, that.allocated - size)
__pyx_v_allocated = __pyx_f_10bytesparse_2_c_Downsize(__pyx_v_that->allocated, (__pyx_v_that->allocated - __pyx_v_size));
+2195: if allocated > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_allocated > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2196: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 2196, __pyx_L1_error)
2197:
2198: # Release blocks within the deleted range
+2199: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+2200: for index in range(offset, offset + size):
__pyx_t_4 = (__pyx_v_offset + __pyx_v_size); __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_offset; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_index = __pyx_t_6;
+2201: that.blocks[index] = Block_Release(that.blocks[index])
(__pyx_v_that->blocks[__pyx_v_index]) = __pyx_f_10bytesparse_2_c_Block_Release((__pyx_v_that->blocks[__pyx_v_index])); }
2202:
+2203: if offset == 0:
__pyx_t_1 = ((__pyx_v_offset == 0) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L14; }
+2204: if allocated == that.allocated:
__pyx_t_1 = ((__pyx_v_allocated == __pyx_v_that->allocated) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L15; }
2205: # Just skip initial if not reallocated and no offset
+2206: memset(&that.blocks[that.start], 0, size * sizeof(Block_*)) # cleanup margin
(void)(memset((&(__pyx_v_that->blocks[__pyx_v_that->start])), 0, (__pyx_v_size * (sizeof(Block_ *)))));
+2207: that.start += size
__pyx_v_that->start = (__pyx_v_that->start + __pyx_v_size);
2208: else:
2209: # Shift elements to make for the deleted gap at the beginning
+2210: offset += that.start
/*else*/ { __pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+2211: memmove(&that.blocks[MARGIN], # realign to initial MARGIN
(void)(memmove((&(__pyx_v_that->blocks[MARGIN])), (&(__pyx_v_that->blocks[(__pyx_v_offset + __pyx_v_size)])), ((__pyx_v_that->endex - (__pyx_v_offset + __pyx_v_size)) * (sizeof(Block_ *)))));
2212: &that.blocks[offset + size],
2213: (that.endex - (offset + size)) * sizeof(Block_*))
+2214: size = that.endex - that.start - size
__pyx_v_size = ((__pyx_v_that->endex - __pyx_v_that->start) - __pyx_v_size);
+2215: that.start = MARGIN
__pyx_v_that->start = MARGIN;
+2216: that.endex = MARGIN + size
__pyx_v_that->endex = (MARGIN + __pyx_v_size);
2217:
2218: # Cleanup margins
+2219: memset(&that.blocks[0], 0, that.start * sizeof(Block_*))
(void)(memset((&(__pyx_v_that->blocks[0])), 0, (__pyx_v_that->start * (sizeof(Block_ *)))));
+2220: memset(&that.blocks[that.endex], 0, (that.allocated - that.endex) * sizeof(Block_*))
(void)(memset((&(__pyx_v_that->blocks[__pyx_v_that->endex])), 0, ((__pyx_v_that->allocated - __pyx_v_that->endex) * (sizeof(Block_ *))))); } __pyx_L15:;
2221: else:
2222: # Shift elements to make for the deleted gap at the requested offset
+2223: memmove(&that.blocks[offset],
/*else*/ { /* … */ (void)(memmove((&(__pyx_v_that->blocks[__pyx_v_offset])), (&(__pyx_v_that->blocks[(__pyx_v_offset + __pyx_v_size)])), ((__pyx_v_that->endex - (__pyx_v_offset + __pyx_v_size)) * (sizeof(Block_ *)))));
2224: &that.blocks[offset + size],
2225: (that.endex - (offset + size)) * sizeof(Block_*))
+2226: that.endex -= size
__pyx_v_that->endex = (__pyx_v_that->endex - __pyx_v_size);
+2227: memset(&that.blocks[that.endex], 0, size * sizeof(Block_*)) # cleanup margin
(void)(memset((&(__pyx_v_that->blocks[__pyx_v_that->endex])), 0, (__pyx_v_size * (sizeof(Block_ *))))); } __pyx_L14:;
2228:
+2229: if allocated != that.allocated:
__pyx_t_1 = ((__pyx_v_allocated != __pyx_v_that->allocated) != 0); if (__pyx_t_1) { /* … */ }
2230: # Reallocate, including the header
+2231: ptr = <Rack_*>PyMem_Realloc(that, Rack_HEADING + (allocated * sizeof(Block_*)))
__pyx_v_ptr = ((Rack_ *)PyMem_Realloc(__pyx_v_that, (Rack_HEADING + (__pyx_v_allocated * (sizeof(Block_ *))))));
+2232: if ptr == NULL:
__pyx_t_1 = ((__pyx_v_ptr == NULL) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2233: raise MemoryError()
PyErr_NoMemory(); __PYX_ERR(0, 2233, __pyx_L1_error)
2234:
2235: # Reassign to that
+2236: that = ptr
__pyx_v_that = __pyx_v_ptr;
+2237: that.allocated = allocated
__pyx_v_that->allocated = __pyx_v_allocated;
2238:
+2239: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2240:
2241:
+2242: cdef Rack_* Rack_Clear(Rack_* that) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Clear(Rack_ *__pyx_v_that) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Clear", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Clear", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2243: return Rack_Delete_(that, 0, that.endex - that.start)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Delete_(__pyx_v_that, 0, (__pyx_v_that->endex - __pyx_v_that->start)); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 2243, __pyx_L1_error)
__pyx_r = __pyx_t_1;
goto __pyx_L0;
2244:
2245:
+2246: cdef Rack_* Rack_Consolidate(Rack_* that) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Consolidate(Rack_ *__pyx_v_that) { size_t __pyx_v_offset; Block_ *__pyx_v_block; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Consolidate", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Consolidate", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2247: cdef:
2248: size_t offset
2249: Block_* block
2250:
+2251: for offset in range(that.start, that.endex):
__pyx_t_1 = __pyx_v_that->endex; __pyx_t_2 = __pyx_t_1; for (__pyx_t_3 = __pyx_v_that->start; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_offset = __pyx_t_3;
+2252: block = that.blocks[offset]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_offset]);
+2253: if block.references > 1:
__pyx_t_4 = ((__pyx_v_block->references > 1) != 0); if (__pyx_t_4) { /* … */ } }
+2254: that.blocks[offset] = Block_Copy(block)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Copy(__pyx_v_block); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 2254, __pyx_L1_error)
(__pyx_v_that->blocks[__pyx_v_offset]) = __pyx_t_5;
+2255: block = Block_Release(block)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Block_Release(__pyx_v_block);
+2256: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2257:
2258:
+2259: cdef Block_** Rack_At_(Rack_* that, size_t offset) nogil:
static Block_ **__pyx_f_10bytesparse_2_c_Rack_At_(Rack_ *__pyx_v_that, size_t __pyx_v_offset) { Block_ **__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+2260: return &that.blocks[that.start + offset]
__pyx_r = (&(__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_offset)])); goto __pyx_L0;
2261:
2262:
+2263: cdef const Block_** Rack_At__(const Rack_* that, size_t offset) nogil:
static Block_ const **__pyx_f_10bytesparse_2_c_Rack_At__(Rack_ const *__pyx_v_that, size_t __pyx_v_offset) { Block_ const **__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+2264: return <const Block_**>&that.blocks[that.start + offset]
__pyx_r = ((Block_ const **)(&(__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_offset)]))); goto __pyx_L0;
2265:
2266:
+2267: cdef Block_* Rack_First_(Rack_* that) nogil:
static Block_ *__pyx_f_10bytesparse_2_c_Rack_First_(Rack_ *__pyx_v_that) { Block_ *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+2268: return that.blocks[that.start]
__pyx_r = (__pyx_v_that->blocks[__pyx_v_that->start]); goto __pyx_L0;
2269:
2270:
+2271: cdef const Block_* Rack_First__(const Rack_* that) nogil:
static Block_ const *__pyx_f_10bytesparse_2_c_Rack_First__(Rack_ const *__pyx_v_that) { Block_ const *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+2272: return that.blocks[that.start]
__pyx_r = (__pyx_v_that->blocks[__pyx_v_that->start]); goto __pyx_L0;
2273:
2274:
+2275: cdef Block_* Rack_Last_(Rack_* that) nogil:
static Block_ *__pyx_f_10bytesparse_2_c_Rack_Last_(Rack_ *__pyx_v_that) { Block_ *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+2276: return that.blocks[that.endex - 1]
__pyx_r = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]); goto __pyx_L0;
2277:
2278:
+2279: cdef const Block_* Rack_Last__(const Rack_* that) nogil:
static Block_ const *__pyx_f_10bytesparse_2_c_Rack_Last__(Rack_ const *__pyx_v_that) { Block_ const *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+2280: return that.blocks[that.endex - 1]
__pyx_r = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]); goto __pyx_L0;
2281:
2282:
+2283: cdef Block_* Rack_Get__(const Rack_* that, size_t offset) nogil:
static Block_ *__pyx_f_10bytesparse_2_c_Rack_Get__(Rack_ const *__pyx_v_that, size_t __pyx_v_offset) { Block_ *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
+2284: return that.blocks[that.start + offset]
__pyx_r = (__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_offset)]); goto __pyx_L0;
2285:
2286:
+2287: cdef Block_* Rack_Get_(const Rack_* that, size_t offset) except? NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Rack_Get_(Rack_ const *__pyx_v_that, size_t __pyx_v_offset) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Get_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Rack_Get_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2288: CheckAddSizeU(that.start, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2288, __pyx_L1_error)
+2289: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
2290:
+2291: if offset < that.endex:
__pyx_t_2 = ((__pyx_v_offset < __pyx_v_that->endex) != 0); if (likely(__pyx_t_2)) { /* … */ }
+2292: return that.blocks[offset]
__pyx_r = (__pyx_v_that->blocks[__pyx_v_offset]); goto __pyx_L0;
2293: else:
+2294: raise IndexError('index out of range')
/*else*/ { __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2294, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 2294, __pyx_L1_error) }
2295:
2296:
+2297: cdef Block_* Rack_Get(const Rack_* that, ssize_t offset) except? NULL:
static Block_ *__pyx_f_10bytesparse_2_c_Rack_Get(Rack_ const *__pyx_v_that, Py_ssize_t __pyx_v_offset) { Block_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Get", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_Get", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2298: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
+2299: offset += <ssize_t>(that.endex - that.start) # anchor to end
__pyx_v_offset = (__pyx_v_offset + ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start)));
+2300: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2301: raise IndexError('index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2301, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2301, __pyx_L1_error)
2302:
+2303: return Rack_Get_(that, <size_t>offset)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_that, ((size_t)__pyx_v_offset)); if (unlikely(__pyx_t_3 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 2303, __pyx_L1_error) __pyx_r = __pyx_t_3; goto __pyx_L0;
2304:
2305:
+2306: cdef Block_* Rack_Set__(Rack_* that, size_t offset, Block_* value) nogil:
static Block_ *__pyx_f_10bytesparse_2_c_Rack_Set__(Rack_ *__pyx_v_that, size_t __pyx_v_offset, Block_ *__pyx_v_value) { Block_ *__pyx_v_backup; Block_ *__pyx_r; /* … */ /* function exit code */ __pyx_L0:; return __pyx_r; }
2307: cdef:
2308: Block_* backup
2309:
+2310: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
+2311: backup = that.blocks[offset]
__pyx_v_backup = (__pyx_v_that->blocks[__pyx_v_offset]);
+2312: that.blocks[offset] = value
(__pyx_v_that->blocks[__pyx_v_offset]) = __pyx_v_value;
+2313: return backup
__pyx_r = __pyx_v_backup; goto __pyx_L0;
2314:
2315:
+2316: cdef vint Rack_Set_(Rack_* that, size_t offset, Block_* value, Block_** backup) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Rack_Set_(Rack_ *__pyx_v_that, size_t __pyx_v_offset, Block_ *__pyx_v_value, Block_ **__pyx_v_backup) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Set_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Rack_Set_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2317: CheckAddSizeU(that.start, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2317, __pyx_L1_error)
+2318: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
2319:
+2320: if offset < that.endex:
__pyx_t_2 = ((__pyx_v_offset < __pyx_v_that->endex) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+2321: if backup:
__pyx_t_2 = (__pyx_v_backup != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+2322: backup[0] = that.blocks[offset]
(__pyx_v_backup[0]) = (__pyx_v_that->blocks[__pyx_v_offset]);
2323: else:
+2324: that.blocks[offset] = Block_Release(that.blocks[offset])
/*else*/ { (__pyx_v_that->blocks[__pyx_v_offset]) = __pyx_f_10bytesparse_2_c_Block_Release((__pyx_v_that->blocks[__pyx_v_offset])); } __pyx_L4:;
+2325: that.blocks[offset] = value
(__pyx_v_that->blocks[__pyx_v_offset]) = __pyx_v_value;
2326: else:
+2327: if backup:
/*else*/ { __pyx_t_2 = (__pyx_v_backup != 0); if (__pyx_t_2) { /* … */ }
+2328: backup[0] = NULL
(__pyx_v_backup[0]) = NULL;
+2329: raise IndexError('index out of range')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2329, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 2329, __pyx_L1_error) } __pyx_L3:;
2330:
2331:
+2332: cdef vint Rack_Set(Rack_* that, ssize_t offset, Block_* value, Block_** backup) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Rack_Set(Rack_ *__pyx_v_that, Py_ssize_t __pyx_v_offset, Block_ *__pyx_v_value, Block_ **__pyx_v_backup) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Set", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_Set", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2333: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
+2334: offset += <ssize_t>(that.endex - that.start) # anchor to end
__pyx_v_offset = (__pyx_v_offset + ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start)));
+2335: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2336: raise IndexError('index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2336, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2336, __pyx_L1_error)
2337:
+2338: Rack_Set_(that, <size_t>offset, value, backup)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Set_(__pyx_v_that, ((size_t)__pyx_v_offset), __pyx_v_value, __pyx_v_backup); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2338, __pyx_L1_error)
2339:
2340:
+2341: cdef Rack_* Rack_Pop__(Rack_* that, Block_** value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Pop__(Rack_ *__pyx_v_that, Block_ **__pyx_v_value) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Pop__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Rack_Pop__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2342: if that.start < that.endex:
__pyx_t_1 = ((__pyx_v_that->start < __pyx_v_that->endex) != 0); if (__pyx_t_1) { /* … */ }
+2343: if value:
__pyx_t_1 = (__pyx_v_value != 0); if (__pyx_t_1) { /* … */ }
+2344: value[0] = Block_Acquire(that.blocks[that.endex - 1]) # backup
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_that->blocks[(__pyx_v_that->endex - 1)])); if (unlikely(__pyx_t_2 == ((Block_ *)NULL))) __PYX_ERR(0, 2344, __pyx_L1_error)
(__pyx_v_value[0]) = __pyx_t_2;
2345:
+2346: return Rack_Delete_(that, that.endex - that.start - 1, 1)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Delete_(__pyx_v_that, ((__pyx_v_that->endex - __pyx_v_that->start) - 1), 1); if (unlikely(__pyx_t_3 == ((Rack_ *)NULL))) __PYX_ERR(0, 2346, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
2347: else:
+2348: if value:
/*else*/ { __pyx_t_1 = (__pyx_v_value != 0); if (__pyx_t_1) { /* … */ }
+2349: value[0] = NULL
(__pyx_v_value[0]) = NULL;
+2350: raise IndexError('pop index out of range')
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 2350, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 2350, __pyx_L1_error) }
2351:
2352:
+2353: cdef Rack_* Rack_Pop_(Rack_* that, size_t offset, Block_** value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Pop_(Rack_ *__pyx_v_that, size_t __pyx_v_offset, Block_ **__pyx_v_value) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Pop_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Rack_Pop_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2354: CheckAddSizeU(that.start, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2354, __pyx_L1_error)
2355:
+2356: if that.start + offset < that.endex:
__pyx_t_2 = (((__pyx_v_that->start + __pyx_v_offset) < __pyx_v_that->endex) != 0); if (__pyx_t_2) { /* … */ }
+2357: if value:
__pyx_t_2 = (__pyx_v_value != 0); if (__pyx_t_2) { /* … */ }
+2358: value[0] = Block_Acquire(that.blocks[that.start + offset]) # backup
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_offset)])); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 2358, __pyx_L1_error)
(__pyx_v_value[0]) = __pyx_t_3;
2359:
+2360: return Rack_Delete_(that, offset, 1)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Delete_(__pyx_v_that, __pyx_v_offset, 1); if (unlikely(__pyx_t_4 == ((Rack_ *)NULL))) __PYX_ERR(0, 2360, __pyx_L1_error)
__pyx_r = __pyx_t_4;
goto __pyx_L0;
2361: else:
+2362: if value:
/*else*/ { __pyx_t_2 = (__pyx_v_value != 0); if (__pyx_t_2) { /* … */ }
+2363: value[0] = NULL
(__pyx_v_value[0]) = NULL;
+2364: raise IndexError('pop index out of range')
__pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 2364, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 2364, __pyx_L1_error) }
2365:
2366:
+2367: cdef Rack_* Rack_Pop(Rack_* that, ssize_t offset, Block_** value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Pop(Rack_ *__pyx_v_that, Py_ssize_t __pyx_v_offset, Block_ **__pyx_v_value) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Pop", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_Pop", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2368: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
+2369: offset += <ssize_t>(that.endex - that.start) # anchor to end
__pyx_v_offset = (__pyx_v_offset + ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start)));
+2370: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2371: raise IndexError('pop index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2371, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2371, __pyx_L1_error)
2372:
+2373: return Rack_Pop_(that, <size_t>offset, value)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Pop_(__pyx_v_that, ((size_t)__pyx_v_offset), __pyx_v_value); if (unlikely(__pyx_t_3 == ((Rack_ *)NULL))) __PYX_ERR(0, 2373, __pyx_L1_error)
__pyx_r = __pyx_t_3;
goto __pyx_L0;
2374:
2375:
+2376: cdef Rack_* Rack_PopLeft(Rack_* that, Block_** value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_PopLeft(Rack_ *__pyx_v_that, Block_ **__pyx_v_value) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_PopLeft", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_PopLeft", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2377: return Rack_Pop_(that, 0, value)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Pop_(__pyx_v_that, 0, __pyx_v_value); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 2377, __pyx_L1_error)
__pyx_r = __pyx_t_1;
goto __pyx_L0;
2378:
2379:
+2380: cdef Rack_* Rack_Insert_(Rack_* that, size_t offset, Block_* value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Insert_(Rack_ *__pyx_v_that, size_t __pyx_v_offset, Block_ *__pyx_v_value) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Insert_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Insert_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2381: # Insert the value at the requested offset
+2382: that = Rack_Reserve_(that, offset, 1)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, __pyx_v_offset, 1); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 2382, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+2383: that.blocks[that.start + offset] = value
(__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_offset)]) = __pyx_v_value;
+2384: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2385:
2386:
+2387: cdef Rack_* Rack_Insert(Rack_* that, ssize_t offset, Block_* value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Insert(Rack_ *__pyx_v_that, Py_ssize_t __pyx_v_offset, Block_ *__pyx_v_value) { Py_ssize_t __pyx_v_size; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Insert", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2388: cdef:
+2389: ssize_t size = <ssize_t>(that.endex - that.start)
__pyx_v_size = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2390:
+2391: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+2392: offset += size # anchor to end
__pyx_v_offset = (__pyx_v_offset + __pyx_v_size);
+2393: if offset < 0:
__pyx_t_1 = ((__pyx_v_offset < 0) != 0); if (__pyx_t_1) { /* … */ }
2394: # raise IndexError('index out of range')
+2395: offset = 0 # as per bytearray.insert
__pyx_v_offset = 0;
2396:
+2397: elif offset > size:
__pyx_t_1 = ((__pyx_v_offset > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L3:;
2398: # raise IndexError('index out of range')
+2399: offset = size # as per bytearray.insert
__pyx_v_offset = __pyx_v_size;
2400:
+2401: return Rack_Insert_(that, <size_t>offset, value)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Insert_(__pyx_v_that, ((size_t)__pyx_v_offset), __pyx_v_value); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 2401, __pyx_L1_error)
__pyx_r = __pyx_t_2;
goto __pyx_L0;
2402:
2403:
+2404: cdef Rack_* Rack_Append(Rack_* that, Block_* value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Append(Rack_ *__pyx_v_that, Block_ *__pyx_v_value) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Append", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Append", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2405: # Insert the value after the end
+2406: that = Rack_Reserve_(that, that.endex - that.start, 1)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, (__pyx_v_that->endex - __pyx_v_that->start), 1); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 2406, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+2407: that.blocks[that.endex - 1] = value
(__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]) = __pyx_v_value;
+2408: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2409:
2410:
+2411: cdef Rack_* Rack_AppendLeft(Rack_* that, Block_* value) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_AppendLeft(Rack_ *__pyx_v_that, Block_ *__pyx_v_value) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_AppendLeft", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_AppendLeft", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2412: # Insert the value after the end
+2413: that = Rack_Reserve_(that, 0, 1)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, 0, 1); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 2413, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+2414: that.blocks[that.start] = value
(__pyx_v_that->blocks[__pyx_v_that->start]) = __pyx_v_value;
+2415: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2416:
2417:
+2418: cdef Rack_* Rack_Extend_(Rack_* that, size_t size, Block_** buffer, bint direct) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Extend_(Rack_ *__pyx_v_that, size_t __pyx_v_size, Block_ **__pyx_v_buffer, int __pyx_v_direct) { size_t __pyx_v_start; size_t __pyx_v_offset; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Extend_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Extend_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2419: cdef:
2420: size_t start
2421: size_t offset
2422:
+2423: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+2424: that = Rack_Reserve_(that, that.endex - that.start, size)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, (__pyx_v_that->endex - __pyx_v_that->start), __pyx_v_size); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 2424, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+2425: if direct:
__pyx_t_1 = (__pyx_v_direct != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+2426: memmove(&that.blocks[that.endex - size], buffer, size * sizeof(Block_*))
(void)(memmove((&(__pyx_v_that->blocks[(__pyx_v_that->endex - __pyx_v_size)])), __pyx_v_buffer, (__pyx_v_size * (sizeof(Block_ *)))));
2427: else:
+2428: start = that.endex - size
/*else*/ { __pyx_v_start = (__pyx_v_that->endex - __pyx_v_size);
+2429: for offset in range(size):
__pyx_t_3 = __pyx_v_size; __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_offset = __pyx_t_5;
+2430: that.blocks[start + offset] = Block_Acquire(buffer[offset])
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_buffer[__pyx_v_offset])); if (unlikely(__pyx_t_6 == ((Block_ *)NULL))) __PYX_ERR(0, 2430, __pyx_L1_error)
(__pyx_v_that->blocks[(__pyx_v_start + __pyx_v_offset)]) = __pyx_t_6;
}
}
__pyx_L4:;
+2431: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2432:
2433:
+2434: cdef Rack_* Rack_Extend(Rack_* that, Rack_* more) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Extend(Rack_ *__pyx_v_that, Rack_ *__pyx_v_more) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Extend", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Extend", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2435: that = Rack_Extend_(that, more.endex - more.start, &more.blocks[more.start], False)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Extend_(__pyx_v_that, (__pyx_v_more->endex - __pyx_v_more->start), (&(__pyx_v_more->blocks[__pyx_v_more->start])), 0); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 2435, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+2436: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2437:
2438:
+2439: cdef Rack_* Rack_ExtendLeft_(Rack_* that, size_t size, Block_** buffer, bint direct) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_ExtendLeft_(Rack_ *__pyx_v_that, size_t __pyx_v_size, Block_ **__pyx_v_buffer, int __pyx_v_direct) { size_t __pyx_v_start; size_t __pyx_v_offset; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_ExtendLeft_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_ExtendLeft_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2440: cdef:
2441: size_t start
2442: size_t offset
2443:
+2444: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+2445: that = Rack_Reserve_(that, 0, size)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, 0, __pyx_v_size); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 2445, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+2446: if direct:
__pyx_t_1 = (__pyx_v_direct != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+2447: memmove(&that.blocks[that.endex - size], buffer, size * sizeof(Block_*))
(void)(memmove((&(__pyx_v_that->blocks[(__pyx_v_that->endex - __pyx_v_size)])), __pyx_v_buffer, (__pyx_v_size * (sizeof(Block_ *)))));
2448: else:
+2449: start = that.start
/*else*/ { __pyx_t_3 = __pyx_v_that->start; __pyx_v_start = __pyx_t_3;
+2450: for offset in range(size):
__pyx_t_3 = __pyx_v_size; __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_offset = __pyx_t_5;
+2451: that.blocks[start + offset] = Block_Acquire(buffer[offset])
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_buffer[__pyx_v_offset])); if (unlikely(__pyx_t_6 == ((Block_ *)NULL))) __PYX_ERR(0, 2451, __pyx_L1_error)
(__pyx_v_that->blocks[(__pyx_v_start + __pyx_v_offset)]) = __pyx_t_6;
}
}
__pyx_L4:;
+2452: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2453:
2454:
+2455: cdef Rack_* Rack_ExtendLeft(Rack_* that, Rack_* more) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_ExtendLeft(Rack_ *__pyx_v_that, Rack_ *__pyx_v_more) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_ExtendLeft", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_ExtendLeft", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2456: that = Rack_ExtendLeft_(that, more.endex - more.start, &more.blocks[more.start], False)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_ExtendLeft_(__pyx_v_that, (__pyx_v_more->endex - __pyx_v_more->start), (&(__pyx_v_more->blocks[__pyx_v_more->start])), 0); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 2456, __pyx_L1_error)
__pyx_v_that = __pyx_t_1;
+2457: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2458:
2459:
+2460: cdef vint Rack_Read_(const Rack_* that, size_t offset,
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Rack_Read_(Rack_ const *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size, Block_ **__pyx_v_buffer, int __pyx_v_direct) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Read_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_Read_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2461: size_t size, Block_** buffer, bint direct) except -1:
+2462: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+2463: if size > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_size > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2464: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2464, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2464, __pyx_L1_error)
2465:
+2466: CheckAddSizeU(that.start, offset)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2466, __pyx_L1_error)
+2467: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
2468:
+2469: CheckAddSizeU(offset, size)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_offset, __pyx_v_size); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2469, __pyx_L1_error)
+2470: if that.endex <= offset + size:
__pyx_t_1 = ((__pyx_v_that->endex <= (__pyx_v_offset + __pyx_v_size)) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2471: raise IndexError('index out of range')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_IndexError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2471, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2471, __pyx_L1_error)
2472:
+2473: if direct:
__pyx_t_1 = (__pyx_v_direct != 0); if (__pyx_t_1) { /* … */ goto __pyx_L6; }
+2474: memmove(buffer, &that.blocks[offset], size * sizeof(Block_*))
(void)(memmove(__pyx_v_buffer, (&(__pyx_v_that->blocks[__pyx_v_offset])), (__pyx_v_size * (sizeof(Block_ *)))));
2475: else:
+2476: for offset in range(offset, offset + size):
/*else*/ { __pyx_t_4 = (__pyx_v_offset + __pyx_v_size); __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_offset; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_offset = __pyx_t_6;
+2477: buffer[offset - that.start] = Block_Acquire(buffer[offset])
__pyx_t_7 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_buffer[__pyx_v_offset])); if (unlikely(__pyx_t_7 == ((Block_ *)NULL))) __PYX_ERR(0, 2477, __pyx_L1_error)
(__pyx_v_buffer[(__pyx_v_offset - __pyx_v_that->start)]) = __pyx_t_7;
}
}
__pyx_L6:;
2478:
2479:
+2480: cdef Rack_* Rack_Write_(Rack_* that, size_t offset,
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_Write_(Rack_ *__pyx_v_that, size_t __pyx_v_offset, size_t __pyx_v_size, Block_ **__pyx_v_buffer, int __pyx_v_direct) { Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_Write_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_Write_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2481: size_t size, Block_** buffer, bint direct) except NULL:
+2482: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+2483: CheckAddSizeU(that.start, offset)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_that->start, __pyx_v_offset); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2483, __pyx_L1_error)
+2484: offset += that.start
__pyx_v_offset = (__pyx_v_offset + __pyx_v_that->start);
2485:
+2486: CheckAddSizeU(offset, size)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_offset, __pyx_v_size); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2486, __pyx_L1_error)
+2487: if that.endex < offset + size:
__pyx_t_1 = ((__pyx_v_that->endex < (__pyx_v_offset + __pyx_v_size)) != 0); if (__pyx_t_1) { /* … */ }
+2488: that = Rack_Reserve_(that, that.endex - that.start, (offset + size) - that.endex)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, (__pyx_v_that->endex - __pyx_v_that->start), ((__pyx_v_offset + __pyx_v_size) - __pyx_v_that->endex)); if (unlikely(__pyx_t_3 == ((Rack_ *)NULL))) __PYX_ERR(0, 2488, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
2489:
+2490: if direct:
__pyx_t_1 = (__pyx_v_direct != 0); if (__pyx_t_1) { /* … */ goto __pyx_L5; }
+2491: memmove(&that.blocks[offset], buffer, size * sizeof(Block_*))
(void)(memmove((&(__pyx_v_that->blocks[__pyx_v_offset])), __pyx_v_buffer, (__pyx_v_size * (sizeof(Block_ *)))));
2492: else:
+2493: for offset in range(offset, offset + size):
/*else*/ { __pyx_t_4 = (__pyx_v_offset + __pyx_v_size); __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_offset; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_offset = __pyx_t_6;
+2494: that.blocks[offset] = Block_Release(that.blocks[offset])
(__pyx_v_that->blocks[__pyx_v_offset]) = __pyx_f_10bytesparse_2_c_Block_Release((__pyx_v_that->blocks[__pyx_v_offset]));
+2495: that.blocks[offset] = Block_Acquire(buffer[offset - that.start])
__pyx_t_7 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_buffer[(__pyx_v_offset - __pyx_v_that->start)])); if (unlikely(__pyx_t_7 == ((Block_ *)NULL))) __PYX_ERR(0, 2495, __pyx_L1_error)
(__pyx_v_that->blocks[__pyx_v_offset]) = __pyx_t_7;
}
}
__pyx_L5:;
2496:
+2497: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2498:
2499:
+2500: cdef vint Rack_ReadSlice_(const Rack_* that, size_t start, size_t endex,
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Rack_ReadSlice_(Rack_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, size_t *__pyx_v_size_, Block_ **__pyx_v_buffer, int __pyx_v_direct) { size_t __pyx_v_size; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_ReadSlice_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_ReadSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2501: size_t* size_, Block_** buffer, bint direct) except -1:
2502: cdef:
+2503: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
2504:
+2505: size_[0] = 0
(__pyx_v_size_[0]) = 0;
2506:
+2507: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2508: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2508, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2508, __pyx_L1_error)
+2509: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+2510: start = size # trim source start
__pyx_v_start = __pyx_v_size;
2511:
+2512: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2513: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2513, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2513, __pyx_L1_error)
+2514: elif endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+2515: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
+2516: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+2517: endex = size # trim source end
__pyx_v_endex = __pyx_v_size;
2518:
+2519: size = endex - start
__pyx_v_size = (__pyx_v_endex - __pyx_v_start);
+2520: Rack_Read_(that, start, size, buffer, direct)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Read_(__pyx_v_that, __pyx_v_start, __pyx_v_size, __pyx_v_buffer, __pyx_v_direct); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2520, __pyx_L1_error)
+2521: size_[0] = size
(__pyx_v_size_[0]) = __pyx_v_size;
2522:
2523:
+2524: cdef vint Rack_ReadSlice(const Rack_* that, ssize_t start, ssize_t endex,
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_Rack_ReadSlice(Rack_ const *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, size_t *__pyx_v_size_, Block_ **__pyx_v_buffer, int __pyx_v_direct) { Py_ssize_t __pyx_v_ssize; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_ReadSlice", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_ReadSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2525: size_t* size_, Block_** buffer, bint direct) except -1:
2526: cdef:
+2527: ssize_t ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2528:
+2529: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2530: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+2531: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2532: start = 0 # trim source start
__pyx_v_start = 0;
2533:
+2534: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+2535: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+2536: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2537: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
2538:
+2539: Rack_ReadSlice_(that, <size_t>start, <size_t>endex, size_, buffer, direct)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_ReadSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), __pyx_v_size_, __pyx_v_buffer, __pyx_v_direct); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2539, __pyx_L1_error)
2540:
2541:
+2542: cdef Rack_* Rack_GetSlice_(const Rack_* that, size_t start, size_t endex) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_GetSlice_(Rack_ const *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex) { Rack_ *__pyx_v_other; size_t __pyx_v_size; size_t __pyx_v_offset; size_t __pyx_v_offset2; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_GetSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_13); __Pyx_AddTraceback("bytesparse._c.Rack_GetSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2543: cdef:
+2544: Rack_* other = NULL
__pyx_v_other = NULL;
+2545: size_t size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
2546: size_t offset
2547: size_t offset2
2548:
+2549: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2550: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2550, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2550, __pyx_L1_error)
+2551: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+2552: start = size # trim source start
__pyx_v_start = __pyx_v_size;
2553:
+2554: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2555: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2555, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2555, __pyx_L1_error)
+2556: elif endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+2557: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
+2558: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+2559: endex = size # trim source end
__pyx_v_endex = __pyx_v_size;
2560:
+2561: try:
{ /*try:*/ { /* … */ } __pyx_L5_error:; __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); goto __pyx_L1_error; __pyx_L9_try_return:; __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_ExceptionReset(__pyx_t_3, __pyx_t_4, __pyx_t_5); goto __pyx_L0; }
+2562: size = endex - start
__pyx_v_size = (__pyx_v_endex - __pyx_v_start);
+2563: other = Rack_Alloc(size)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Rack_Alloc(__pyx_v_size); if (unlikely(__pyx_t_6 == ((Rack_ *)NULL))) __PYX_ERR(0, 2563, __pyx_L5_error)
__pyx_v_other = __pyx_t_6;
+2564: CheckAddSizeU(other.start, size)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_other->start, __pyx_v_size); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2564, __pyx_L5_error)
+2565: offset2 = that.start + start
__pyx_v_offset2 = (__pyx_v_that->start + __pyx_v_start);
2566:
+2567: for offset in range(other.start, other.start + size):
__pyx_t_8 = (__pyx_v_other->start + __pyx_v_size); __pyx_t_9 = __pyx_t_8; for (__pyx_t_10 = __pyx_v_other->start; __pyx_t_10 < __pyx_t_9; __pyx_t_10+=1) { __pyx_v_offset = __pyx_t_10;
+2568: other.blocks[offset] = Block_Acquire(that.blocks[offset2])
__pyx_t_11 = __pyx_f_10bytesparse_2_c_Block_Acquire((__pyx_v_that->blocks[__pyx_v_offset2])); if (unlikely(__pyx_t_11 == ((Block_ *)NULL))) __PYX_ERR(0, 2568, __pyx_L5_error)
(__pyx_v_other->blocks[__pyx_v_offset]) = __pyx_t_11;
+2569: offset2 += 1
__pyx_v_offset2 = (__pyx_v_offset2 + 1); }
2570:
+2571: return other
__pyx_r = __pyx_v_other; goto __pyx_L9_try_return;
+2572: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Rack_GetSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_12, &__pyx_t_13) < 0) __PYX_ERR(0, 2572, __pyx_L7_except_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_GOTREF(__pyx_t_12); __Pyx_GOTREF(__pyx_t_13);
+2573: other = Rack_Free(other)
__pyx_v_other = __pyx_f_10bytesparse_2_c_Rack_Free(__pyx_v_other);
+2574: raise
__Pyx_GIVEREF(__pyx_t_2); __Pyx_GIVEREF(__pyx_t_12); __Pyx_XGIVEREF(__pyx_t_13); __Pyx_ErrRestoreWithState(__pyx_t_2, __pyx_t_12, __pyx_t_13); __pyx_t_2 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __PYX_ERR(0, 2574, __pyx_L7_except_error) } __pyx_L7_except_error:;
2575:
2576:
+2577: cdef Rack_* Rack_GetSlice(const Rack_* that, ssize_t start, ssize_t endex) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_GetSlice(Rack_ const *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex) { Py_ssize_t __pyx_v_ssize; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_GetSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_GetSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2578: cdef:
+2579: ssize_t ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2580:
+2581: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2582: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+2583: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2584: start = 0 # trim source start
__pyx_v_start = 0;
2585:
+2586: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+2587: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+2588: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2589: endex = start # clamp negative source length
__pyx_v_endex = __pyx_v_start;
2590:
+2591: return Rack_GetSlice_(that, <size_t>start, <size_t>endex)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_GetSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex)); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 2591, __pyx_L1_error)
__pyx_r = __pyx_t_2;
goto __pyx_L0;
2592:
2593:
+2594: cdef Rack_* Rack_WriteSlice_(Rack_* that, size_t start, size_t endex,
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_WriteSlice_(Rack_ *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, size_t __pyx_v_size, Block_ **__pyx_v_buffer, int __pyx_v_direct) { size_t __pyx_v_size2; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_WriteSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_WriteSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2595: size_t size, Block_** buffer, bint direct) except NULL:
2596: cdef:
2597: size_t size2 # source size
2598:
+2599: size2 = size
__pyx_v_size2 = __pyx_v_size;
+2600: size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
2601:
+2602: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2603: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2603, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2603, __pyx_L1_error)
+2604: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+2605: start = size # trim target start
__pyx_v_start = __pyx_v_size;
2606:
+2607: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2608: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2608, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2608, __pyx_L1_error)
+2609: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+2610: endex = size # trim target end
__pyx_v_endex = __pyx_v_size;
2611:
+2612: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2613: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
+2614: size = endex - start
__pyx_v_size = (__pyx_v_endex - __pyx_v_start);
2615:
+2616: if size2 > size: # enlarge target at range end
__pyx_t_1 = ((__pyx_v_size2 > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L6; }
+2617: that = Rack_Reserve_(that, endex, size2 - size)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, __pyx_v_endex, (__pyx_v_size2 - __pyx_v_size)); if (unlikely(__pyx_t_3 == ((Rack_ *)NULL))) __PYX_ERR(0, 2617, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
2618:
+2619: elif size > size2: # shrink target at range end
__pyx_t_1 = ((__pyx_v_size > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ } __pyx_L6:;
+2620: endex -= size - size2
__pyx_v_endex = (__pyx_v_endex - (__pyx_v_size - __pyx_v_size2));
+2621: that = Rack_Delete_(that, endex, size - size2)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Delete_(__pyx_v_that, __pyx_v_endex, (__pyx_v_size - __pyx_v_size2)); if (unlikely(__pyx_t_3 == ((Rack_ *)NULL))) __PYX_ERR(0, 2621, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
2622:
+2623: that = Rack_Write_(that, start, size2, buffer, direct)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Write_(__pyx_v_that, __pyx_v_start, __pyx_v_size2, __pyx_v_buffer, __pyx_v_direct); if (unlikely(__pyx_t_3 == ((Rack_ *)NULL))) __PYX_ERR(0, 2623, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
+2624: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2625:
2626:
+2627: cdef Rack_* Rack_WriteSlice(Rack_* that, ssize_t start, ssize_t endex,
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_WriteSlice(Rack_ *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, size_t __pyx_v_size, Block_ **__pyx_v_buffer, int __pyx_v_direct) { Py_ssize_t __pyx_v_ssize; Py_ssize_t __pyx_v_ssize2; Py_ssize_t __pyx_v_start2; Py_ssize_t __pyx_v_endex2; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_WriteSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_WriteSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2628: size_t size, Block_** buffer, bint direct) except NULL:
2629: cdef:
2630: ssize_t ssize # target size
2631: ssize_t ssize2 # source size
2632: ssize_t start2 # source start
2633: ssize_t endex2 # source end
2634:
+2635: start2 = 0
__pyx_v_start2 = 0;
+2636: endex2 = <ssize_t>size
__pyx_v_endex2 = ((Py_ssize_t)__pyx_v_size);
2637:
+2638: ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2639:
+2640: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2641: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+2642: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
2643: # start2 -= start # skip initial source data # as per bytearray
+2644: start = 0 # trim target start
__pyx_v_start = 0;
+2645: if start2 > endex2:
__pyx_t_1 = ((__pyx_v_start2 > __pyx_v_endex2) != 0); if (__pyx_t_1) { /* … */ }
+2646: start2 = endex2 # clamp source start
__pyx_v_start2 = __pyx_v_endex2;
2647:
+2648: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+2649: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+2650: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2651: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
2652:
+2653: if endex2 < start2:
__pyx_t_1 = ((__pyx_v_endex2 < __pyx_v_start2) != 0); if (__pyx_t_1) { /* … */ }
+2654: endex2 = start2 # clamp negative source length
__pyx_v_endex2 = __pyx_v_start2;
+2655: ssize2 = endex2 - start2
__pyx_v_ssize2 = (__pyx_v_endex2 - __pyx_v_start2);
2656:
+2657: that = Rack_WriteSlice_(that, <size_t>start, <size_t>endex, <size_t>ssize2, &buffer[start2], direct)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_WriteSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), ((size_t)__pyx_v_ssize2), (&(__pyx_v_buffer[__pyx_v_start2])), __pyx_v_direct); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 2657, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+2658: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2659:
2660:
+2661: cdef Rack_* Rack_SetSlice_(Rack_* that, size_t start, size_t endex,
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_SetSlice_(Rack_ *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex, Rack_ *__pyx_v_src, size_t __pyx_v_start2, size_t __pyx_v_endex2) { size_t __pyx_v_size2; PyObject *__pyx_v_size = NULL; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_SetSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Rack_SetSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_size); __Pyx_RefNannyFinishContext(); return __pyx_r; }
2662: Rack_* src, size_t start2, size_t endex2) except NULL:
2663: cdef:
2664: size_t size2 # source size
2665:
+2666: size2 = src.endex - src.start
__pyx_v_size2 = (__pyx_v_src->endex - __pyx_v_src->start);
2667:
+2668: if start2 > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start2 > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2669: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2669, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2669, __pyx_L1_error)
+2670: elif start2 > size2:
__pyx_t_1 = ((__pyx_v_start2 > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+2671: start2 = size2 # trim source start
__pyx_v_start2 = __pyx_v_size2;
2672:
+2673: if endex2 > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex2 > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2674: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2674, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2674, __pyx_L1_error)
+2675: elif endex2 > size2:
__pyx_t_1 = ((__pyx_v_endex2 > __pyx_v_size2) != 0); if (__pyx_t_1) { /* … */ }
+2676: endex2 = size2 # trim source end
__pyx_v_endex2 = __pyx_v_size2;
2677:
+2678: if endex2 < start2:
__pyx_t_1 = ((__pyx_v_endex2 < __pyx_v_start2) != 0); if (__pyx_t_1) { /* … */ }
+2679: endex2 = start2 # clamp negative source length
__pyx_v_endex2 = __pyx_v_start2;
+2680: size2 = endex2 - start2
__pyx_v_size2 = (__pyx_v_endex2 - __pyx_v_start2);
2681:
+2682: size = that.endex - that.start
__pyx_t_2 = __Pyx_PyInt_FromSize_t((__pyx_v_that->endex - __pyx_v_that->start)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2682, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_size = __pyx_t_2; __pyx_t_2 = 0;
2683:
+2684: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2685: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2685, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2685, __pyx_L1_error)
+2686: elif start > size:
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_start); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2686, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_RichCompare(__pyx_t_2, __pyx_v_size, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2686, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 2686, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ }
+2687: start = size # trim target start
__pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_v_size); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 2687, __pyx_L1_error) __pyx_v_start = __pyx_t_4;
2688:
+2689: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2690: raise OverflowError('size overflow')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2690, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 2690, __pyx_L1_error)
+2691: elif endex > size:
__pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_endex); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2691, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = PyObject_RichCompare(__pyx_t_3, __pyx_v_size, Py_GT); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2691, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 2691, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; if (__pyx_t_1) { /* … */ }
+2692: endex = size # trim target end
__pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_v_size); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 2692, __pyx_L1_error) __pyx_v_endex = __pyx_t_4;
2693:
+2694: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2695: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
+2696: size = endex - start
__pyx_t_2 = __Pyx_PyInt_FromSize_t((__pyx_v_endex - __pyx_v_start)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2696, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF_SET(__pyx_v_size, __pyx_t_2); __pyx_t_2 = 0;
2697:
+2698: if size2 > size: # enlarge target at range end
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2698, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_RichCompare(__pyx_t_2, __pyx_v_size, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2698, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 2698, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ goto __pyx_L9; }
+2699: that = Rack_Reserve_(that, endex, size2 - size)
__pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2699, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = PyNumber_Subtract(__pyx_t_3, __pyx_v_size); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2699, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_t_2); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 2699, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_that, __pyx_v_endex, __pyx_t_4); if (unlikely(__pyx_t_5 == ((Rack_ *)NULL))) __PYX_ERR(0, 2699, __pyx_L1_error) __pyx_v_that = __pyx_t_5;
2700:
+2701: elif size > size2: # shrink target at range end
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2701, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_RichCompare(__pyx_v_size, __pyx_t_2, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2701, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 2701, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ } __pyx_L9:;
+2702: endex -= size - size2
__pyx_t_3 = __Pyx_PyInt_FromSize_t(__pyx_v_endex); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 2702, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2702, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_6 = PyNumber_Subtract(__pyx_v_size, __pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 2702, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = PyNumber_InPlaceSubtract(__pyx_t_3, __pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2702, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_t_2); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 2702, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v_endex = __pyx_t_4;
+2703: that = Rack_Delete_(that, endex, size - size2)
__pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2703, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_6 = PyNumber_Subtract(__pyx_v_size, __pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 2703, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_t_6); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 2703, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Delete_(__pyx_v_that, __pyx_v_endex, __pyx_t_4); if (unlikely(__pyx_t_5 == ((Rack_ *)NULL))) __PYX_ERR(0, 2703, __pyx_L1_error) __pyx_v_that = __pyx_t_5;
2704:
+2705: that = Rack_Write_(that, start, size2, &src.blocks[src.start + start2], False)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Write_(__pyx_v_that, __pyx_v_start, __pyx_v_size2, (&(__pyx_v_src->blocks[(__pyx_v_src->start + __pyx_v_start2)])), 0); if (unlikely(__pyx_t_5 == ((Rack_ *)NULL))) __PYX_ERR(0, 2705, __pyx_L1_error)
__pyx_v_that = __pyx_t_5;
+2706: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2707:
2708:
+2709: cdef Rack_* Rack_SetSlice(Rack_* that, ssize_t start, ssize_t endex,
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_SetSlice(Rack_ *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex, Rack_ *__pyx_v_src, Py_ssize_t __pyx_v_start2, Py_ssize_t __pyx_v_endex2) { Py_ssize_t __pyx_v_ssize; Py_ssize_t __pyx_v_ssize2; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_SetSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_SetSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2710: Rack_* src, ssize_t start2, ssize_t endex2) except NULL:
2711: cdef:
2712: ssize_t ssize # target size
2713: ssize_t ssize2 # source size
2714:
+2715: ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
+2716: ssize2 = <ssize_t>(src.endex - src.start)
__pyx_v_ssize2 = ((Py_ssize_t)(__pyx_v_src->endex - __pyx_v_src->start));
2717:
+2718: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2719: start += ssize # anchor to target end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+2720: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
2721: # start2 -= start # skip initial source data # as per bytearray
+2722: start = 0 # trim target start
__pyx_v_start = 0;
2723:
+2724: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+2725: endex += ssize # anchor to target end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+2726: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2727: endex = start # clamp negative target length
__pyx_v_endex = __pyx_v_start;
2728:
+2729: if start2 < 0:
__pyx_t_1 = ((__pyx_v_start2 < 0) != 0); if (__pyx_t_1) { /* … */ }
+2730: start2 += ssize2 # anchor to source end
__pyx_v_start2 = (__pyx_v_start2 + __pyx_v_ssize2);
+2731: if start2 < 0:
__pyx_t_1 = ((__pyx_v_start2 < 0) != 0); if (__pyx_t_1) { /* … */ }
+2732: start2 = 0 # trim source start
__pyx_v_start2 = 0;
2733:
+2734: if endex2 < 0:
__pyx_t_1 = ((__pyx_v_endex2 < 0) != 0); if (__pyx_t_1) { /* … */ }
+2735: endex2 += ssize2 # anchor to source end
__pyx_v_endex2 = (__pyx_v_endex2 + __pyx_v_ssize2);
+2736: if endex2 < start2:
__pyx_t_1 = ((__pyx_v_endex2 < __pyx_v_start2) != 0); if (__pyx_t_1) { /* … */ }
+2737: endex2 = start2 # clamp negative source length
__pyx_v_endex2 = __pyx_v_start2;
2738:
+2739: that = Rack_SetSlice_(that, <size_t>start, <size_t>endex, src, <size_t>start2, <size_t>endex2)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_SetSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex), __pyx_v_src, ((size_t)__pyx_v_start2), ((size_t)__pyx_v_endex2)); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 2739, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+2740: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2741:
2742:
+2743: cdef Rack_* Rack_DelSlice_(Rack_* that, size_t start, size_t endex) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_DelSlice_(Rack_ *__pyx_v_that, size_t __pyx_v_start, size_t __pyx_v_endex) { size_t __pyx_v_size; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_DelSlice_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Rack_DelSlice_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2744: cdef:
2745: size_t size
2746:
+2747: size = that.endex - that.start
__pyx_v_size = (__pyx_v_that->endex - __pyx_v_that->start);
2748:
+2749: if start > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_start > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2750: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2750, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2750, __pyx_L1_error)
+2751: elif start > size:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+2752: start = size # trim start
__pyx_v_start = __pyx_v_size;
2753:
+2754: if endex > SIZE_HMAX:
__pyx_t_1 = ((__pyx_v_endex > SIZE_HMAX) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+2755: raise OverflowError('size overflow')
__pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2755, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 2755, __pyx_L1_error)
+2756: elif endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+2757: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
+2758: elif endex > size:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:;
+2759: endex = size # trim end
__pyx_v_endex = __pyx_v_size;
2760:
+2761: that = Rack_Delete_(that, start, (endex - start))
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Delete_(__pyx_v_that, __pyx_v_start, (__pyx_v_endex - __pyx_v_start)); if (unlikely(__pyx_t_3 == ((Rack_ *)NULL))) __PYX_ERR(0, 2761, __pyx_L1_error)
__pyx_v_that = __pyx_t_3;
+2762: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2763:
2764:
+2765: cdef Rack_* Rack_DelSlice(Rack_* that, ssize_t start, ssize_t endex) except NULL:
static Rack_ *__pyx_f_10bytesparse_2_c_Rack_DelSlice(Rack_ *__pyx_v_that, Py_ssize_t __pyx_v_start, Py_ssize_t __pyx_v_endex) { Py_ssize_t __pyx_v_ssize; Rack_ *__pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_DelSlice", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rack_DelSlice", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2766: cdef:
2767: ssize_t ssize
2768:
+2769: ssize = <ssize_t>(that.endex - that.start)
__pyx_v_ssize = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2770:
+2771: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2772: start += ssize # anchor to end
__pyx_v_start = (__pyx_v_start + __pyx_v_ssize);
+2773: if start < 0:
__pyx_t_1 = ((__pyx_v_start < 0) != 0); if (__pyx_t_1) { /* … */ }
+2774: start = 0 # trim start
__pyx_v_start = 0;
2775:
+2776: if endex < 0:
__pyx_t_1 = ((__pyx_v_endex < 0) != 0); if (__pyx_t_1) { /* … */ }
+2777: endex += ssize # anchor to end
__pyx_v_endex = (__pyx_v_endex + __pyx_v_ssize);
+2778: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2779: endex = start # clamp negative length
__pyx_v_endex = __pyx_v_start;
2780:
+2781: that = Rack_DelSlice_(that, <size_t>start, <size_t>endex)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_DelSlice_(__pyx_v_that, ((size_t)__pyx_v_start), ((size_t)__pyx_v_endex)); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 2781, __pyx_L1_error)
__pyx_v_that = __pyx_t_2;
+2782: return that
__pyx_r = __pyx_v_that; goto __pyx_L0;
2783:
2784:
+2785: cdef ssize_t Rack_IndexAt(const Rack_* that, addr_t address) except -2:
static Py_ssize_t __pyx_f_10bytesparse_2_c_Rack_IndexAt(Rack_ const *__pyx_v_that, addr_t __pyx_v_address) { Py_ssize_t __pyx_v_left; Py_ssize_t __pyx_v_right; Py_ssize_t __pyx_v_center; Block_ const *__pyx_v_block; Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_IndexAt", 0); /* … */ /* function exit code */ __pyx_r = 0; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2786: cdef:
+2787: ssize_t left = 0
__pyx_v_left = 0;
+2788: ssize_t right = <ssize_t>(that.endex - that.start)
__pyx_v_right = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2789: ssize_t center
2790: const Block_* block
2791:
+2792: if right:
__pyx_t_1 = (__pyx_v_right != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+2793: block = that.blocks[that.start]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_that->start]);
+2794: if address < Block_Start(block):
__pyx_t_1 = ((__pyx_v_address < __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block)) != 0); if (__pyx_t_1) { /* … */ }
+2795: return -1
__pyx_r = -1L; goto __pyx_L0;
2796:
+2797: block = that.blocks[that.endex - 1]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]);
+2798: if Block_Endex(block) <= address:
__pyx_t_1 = ((__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block) <= __pyx_v_address) != 0); if (__pyx_t_1) { /* … */ }
+2799: return -1
__pyx_r = -1L; goto __pyx_L0;
2800: else:
+2801: return -1
/*else*/ { __pyx_r = -1L; goto __pyx_L0; } __pyx_L3:;
2802:
+2803: while left <= right:
while (1) { __pyx_t_1 = ((__pyx_v_left <= __pyx_v_right) != 0); if (!__pyx_t_1) break;
+2804: center = (left + right) >> 1
__pyx_v_center = ((__pyx_v_left + __pyx_v_right) >> 1);
+2805: block = that.blocks[that.start + center]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_center)]);
2806:
+2807: if Block_Endex(block) <= address:
__pyx_t_1 = ((__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block) <= __pyx_v_address) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L8; }
+2808: left = center + 1
__pyx_v_left = (__pyx_v_center + 1);
+2809: elif address < Block_Start(block):
__pyx_t_1 = ((__pyx_v_address < __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L8; }
+2810: right = center - 1
__pyx_v_right = (__pyx_v_center - 1);
2811: else:
+2812: return center
/*else*/ { __pyx_r = __pyx_v_center; goto __pyx_L0; } __pyx_L8:; }
2813: else:
+2814: return -1
/*else*/ { __pyx_r = -1L; goto __pyx_L0; }
2815:
2816:
+2817: cdef ssize_t Rack_IndexStart(const Rack_* that, addr_t address) except -2:
static Py_ssize_t __pyx_f_10bytesparse_2_c_Rack_IndexStart(Rack_ const *__pyx_v_that, addr_t __pyx_v_address) { Py_ssize_t __pyx_v_left; Py_ssize_t __pyx_v_right; Py_ssize_t __pyx_v_center; Block_ const *__pyx_v_block; Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_IndexStart", 0); /* … */ /* function exit code */ __pyx_r = 0; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2818: cdef:
+2819: ssize_t left = 0
__pyx_v_left = 0;
+2820: ssize_t right = <ssize_t>(that.endex - that.start)
__pyx_v_right = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2821: ssize_t center
2822: const Block_* block
2823:
+2824: if right:
__pyx_t_1 = (__pyx_v_right != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+2825: block = that.blocks[that.start]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_that->start]);
+2826: if address <= Block_Start(block):
__pyx_t_1 = ((__pyx_v_address <= __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block)) != 0); if (__pyx_t_1) { /* … */ }
+2827: return 0
__pyx_r = 0; goto __pyx_L0;
2828:
+2829: block = that.blocks[that.endex - 1]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]);
+2830: if Block_Endex(block) <= address:
__pyx_t_1 = ((__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block) <= __pyx_v_address) != 0); if (__pyx_t_1) { /* … */ }
+2831: return right
__pyx_r = __pyx_v_right; goto __pyx_L0;
2832: else:
+2833: return 0
/*else*/ { __pyx_r = 0; goto __pyx_L0; } __pyx_L3:;
2834:
+2835: while left <= right:
while (1) { __pyx_t_1 = ((__pyx_v_left <= __pyx_v_right) != 0); if (!__pyx_t_1) break;
+2836: center = (left + right) >> 1
__pyx_v_center = ((__pyx_v_left + __pyx_v_right) >> 1);
+2837: block = that.blocks[that.start + center]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_center)]);
2838:
+2839: if Block_Endex(block) <= address:
__pyx_t_1 = ((__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block) <= __pyx_v_address) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L8; }
+2840: left = center + 1
__pyx_v_left = (__pyx_v_center + 1);
+2841: elif address < Block_Start(block):
__pyx_t_1 = ((__pyx_v_address < __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L8; }
+2842: right = center - 1
__pyx_v_right = (__pyx_v_center - 1);
2843: else:
+2844: return center
/*else*/ { __pyx_r = __pyx_v_center; goto __pyx_L0; } __pyx_L8:; }
2845: else:
+2846: return left
/*else*/ { __pyx_r = __pyx_v_left; goto __pyx_L0; }
2847:
2848:
+2849: cdef ssize_t Rack_IndexEndex(const Rack_* that, addr_t address) except -2:
static Py_ssize_t __pyx_f_10bytesparse_2_c_Rack_IndexEndex(Rack_ const *__pyx_v_that, addr_t __pyx_v_address) { Py_ssize_t __pyx_v_left; Py_ssize_t __pyx_v_right; Py_ssize_t __pyx_v_center; Block_ const *__pyx_v_block; Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("Rack_IndexEndex", 0); /* … */ /* function exit code */ __pyx_r = 0; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2850: cdef:
+2851: ssize_t left = 0
__pyx_v_left = 0;
+2852: ssize_t right = <ssize_t>(that.endex - that.start)
__pyx_v_right = ((Py_ssize_t)(__pyx_v_that->endex - __pyx_v_that->start));
2853: ssize_t center
2854: const Block_* block
2855:
+2856: if right:
__pyx_t_1 = (__pyx_v_right != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+2857: block = that.blocks[that.start]
__pyx_v_block = (__pyx_v_that->blocks[__pyx_v_that->start]);
+2858: if address < Block_Start(block):
__pyx_t_1 = ((__pyx_v_address < __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block)) != 0); if (__pyx_t_1) { /* … */ }
+2859: return 0
__pyx_r = 0; goto __pyx_L0;
2860:
+2861: block = that.blocks[that.endex - 1]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->endex - 1)]);
+2862: if Block_Endex(block) <= address:
__pyx_t_1 = ((__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block) <= __pyx_v_address) != 0); if (__pyx_t_1) { /* … */ }
+2863: return right
__pyx_r = __pyx_v_right; goto __pyx_L0;
2864: else:
+2865: return 0
/*else*/ { __pyx_r = 0; goto __pyx_L0; } __pyx_L3:;
2866:
+2867: while left <= right:
while (1) { __pyx_t_1 = ((__pyx_v_left <= __pyx_v_right) != 0); if (!__pyx_t_1) break;
+2868: center = (left + right) >> 1
__pyx_v_center = ((__pyx_v_left + __pyx_v_right) >> 1);
+2869: block = that.blocks[that.start + center]
__pyx_v_block = (__pyx_v_that->blocks[(__pyx_v_that->start + __pyx_v_center)]);
2870:
+2871: if Block_Endex(block) <= address:
__pyx_t_1 = ((__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block) <= __pyx_v_address) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L8; }
+2872: left = center + 1
__pyx_v_left = (__pyx_v_center + 1);
+2873: elif address < Block_Start(block):
__pyx_t_1 = ((__pyx_v_address < __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L8; }
+2874: right = center - 1
__pyx_v_right = (__pyx_v_center - 1);
2875: else:
+2876: return center + 1
/*else*/ { __pyx_r = (__pyx_v_center + 1); goto __pyx_L0; } __pyx_L8:; }
2877: else:
+2878: return right + 1
/*else*/ { __pyx_r = (__pyx_v_right + 1); goto __pyx_L0; }
2879:
2880:
2881: # =====================================================================================================================
2882:
+2883: cdef class Rover:
struct __pyx_vtabstruct_10bytesparse_2_c_Rover { int (*next_)(struct __pyx_obj_10bytesparse_2_c_Rover *); __pyx_t_10bytesparse_2_c_vint (*dispose_)(struct __pyx_obj_10bytesparse_2_c_Rover *); }; static struct __pyx_vtabstruct_10bytesparse_2_c_Rover *__pyx_vtabptr_10bytesparse_2_c_Rover;
2884:
+2885: def __cinit__(self):
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_5Rover_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_10bytesparse_2_c_5Rover_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover___cinit__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_5Rover___cinit__(CYTHON_UNUSED struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__cinit__", 0); /* function exit code */ __pyx_r = 0; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2886: pass
2887:
+2888: def __dealloc__(self):
/* Python wrapper */ static void __pyx_pw_10bytesparse_2_c_5Rover_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ static void __pyx_pw_10bytesparse_2_c_5Rover_3__dealloc__(PyObject *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); __pyx_pf_10bytesparse_2_c_5Rover_2__dealloc__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); } static void __pyx_pf_10bytesparse_2_c_5Rover_2__dealloc__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__", 0); /* … */ /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __Pyx_WriteUnraisable("bytesparse._c.Rover.__dealloc__", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); __pyx_L0:; __Pyx_RefNannyFinishContext(); }
+2889: self.dispose_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Rover *)__pyx_v_self->__pyx_vtab)->dispose_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2889, __pyx_L1_error)
2890:
+2891: def __init__(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_5Rover_5__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_10bytesparse_2_c_5Rover_5__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory = 0; addr_t __pyx_v_start; addr_t __pyx_v_endex; PyObject *__pyx_v_pattern = 0; int __pyx_v_forward; int __pyx_v_infinite; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_memory,&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_pattern,&__pyx_n_s_forward,&__pyx_n_s_infinite,0}; PyObject* values[6] = {0,0,0,0,0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); CYTHON_FALLTHROUGH; case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_memory)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 1); __PYX_ERR(0, 2891, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 2); __PYX_ERR(0, 2891, __pyx_L3_error) } 
CYTHON_FALLTHROUGH; case 3: if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pattern)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 3); __PYX_ERR(0, 2891, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 4: if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_forward)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 4); __PYX_ERR(0, 2891, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 5: if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_infinite)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 5); __PYX_ERR(0, 2891, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 2891, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 6) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[2] = PyTuple_GET_ITEM(__pyx_args, 2); values[3] = PyTuple_GET_ITEM(__pyx_args, 3); values[4] = PyTuple_GET_ITEM(__pyx_args, 4); values[5] = PyTuple_GET_ITEM(__pyx_args, 5); } __pyx_v_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)values[0]); __pyx_v_start = __Pyx_PyInt_As_uint_fast64_t(values[1]); if (unlikely((__pyx_v_start == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 2894, __pyx_L3_error) __pyx_v_endex = __Pyx_PyInt_As_uint_fast64_t(values[2]); if (unlikely((__pyx_v_endex == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 2895, __pyx_L3_error) __pyx_v_pattern = values[3]; __pyx_v_forward = __Pyx_PyObject_IsTrue(values[4]); if (unlikely((__pyx_v_forward == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 2897, __pyx_L3_error) __pyx_v_infinite = __Pyx_PyObject_IsTrue(values[5]); if (unlikely((__pyx_v_infinite == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 2898, __pyx_L3_error) } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; 
__Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 2891, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Rover.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return -1; __pyx_L4_argument_unpacking_done:; if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_memory), __pyx_ptype_10bytesparse_2_c_Memory, 0, "memory", 0))) __PYX_ERR(0, 2893, __pyx_L1_error) __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_4__init__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self), __pyx_v_memory, __pyx_v_start, __pyx_v_endex, __pyx_v_pattern, __pyx_v_forward, __pyx_v_infinite); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_5Rover_4__init__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self, struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory, addr_t __pyx_v_start, addr_t __pyx_v_endex, PyObject *__pyx_v_pattern, int __pyx_v_forward, int __pyx_v_infinite) { Block_ *__pyx_v_block; __Pyx_memviewslice __pyx_v_view = { 0, 0, { 0 }, { 0 }, { 0 } }; addr_t __pyx_v_offset; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__init__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_7, 1); __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Rover.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __Pyx_RefNannyFinishContext(); return __pyx_r; }
2892: self,
2893: Memory memory not None,
2894: addr_t start,
2895: addr_t endex,
2896: object pattern,
2897: bint forward,
2898: bint infinite,
2899: ):
2900: cdef:
+2901: Block_* block = NULL
__pyx_v_block = NULL;
2902: const byte_t[:] view
2903: addr_t offset
2904:
+2905: if forward:
__pyx_t_1 = (__pyx_v_forward != 0); if (__pyx_t_1) { /* … */ goto __pyx_L3; }
+2906: if endex < start:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+2907: endex = start
__pyx_v_endex = __pyx_v_start;
2908: else:
+2909: if start > endex:
/*else*/ { __pyx_t_1 = ((__pyx_v_start > __pyx_v_endex) != 0); if (__pyx_t_1) { /* … */ } } __pyx_L3:;
+2910: start = endex
__pyx_v_start = __pyx_v_endex;
2911:
+2912: if pattern is not None:
__pyx_t_1 = (__pyx_v_pattern != Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ }
+2913: if isinstance(pattern, int):
__pyx_t_2 = PyInt_Check(__pyx_v_pattern);
__pyx_t_1 = (__pyx_t_2 != 0);
if (__pyx_t_1) {
/* … */
goto __pyx_L7;
}
+2914: self._pattern_value = <byte_t>pattern
__pyx_t_3 = __Pyx_PyInt_As_byte_t(__pyx_v_pattern); if (unlikely((__pyx_t_3 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 2914, __pyx_L1_error) __pyx_v_self->_pattern_value = ((byte_t)__pyx_t_3);
+2915: self._pattern_data = &self._pattern_value
__pyx_v_self->_pattern_data = (&__pyx_v_self->_pattern_value);
+2916: self._pattern_size = 1
__pyx_v_self->_pattern_size = 1;
2917: else:
+2918: try:
/*else*/ { { /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L13_try_end; __pyx_L8_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_7, 1); /* … */ __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); goto __pyx_L1_error; __pyx_L9_exception_handled:; __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); __pyx_L13_try_end:; }
+2919: view = pattern
__pyx_t_7 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_pattern, 0); if (unlikely(!__pyx_t_7.memview)) __PYX_ERR(0, 2919, __pyx_L8_error) __pyx_v_view = __pyx_t_7; __pyx_t_7.memview = NULL; __pyx_t_7.data = NULL;
+2920: except TypeError:
__pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_TypeError); if (__pyx_t_8) { __Pyx_AddTraceback("bytesparse._c.Rover.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_10, &__pyx_t_11) < 0) __PYX_ERR(0, 2920, __pyx_L10_except_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_11);
+2921: view = bytes(pattern)
__pyx_t_12 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_pattern); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 2921, __pyx_L10_except_error) __Pyx_GOTREF(__pyx_t_12); __pyx_t_7 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_t_12, 0); if (unlikely(!__pyx_t_7.memview)) __PYX_ERR(0, 2921, __pyx_L10_except_error) __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __pyx_v_view = __pyx_t_7; __pyx_t_7.memview = NULL; __pyx_t_7.data = NULL; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; goto __pyx_L9_exception_handled; } goto __pyx_L10_except_error; __pyx_L10_except_error:;
+2922: self._pattern_view = view # save references
__PYX_XDEC_MEMVIEW(&__pyx_v_self->_pattern_view, 0); __PYX_INC_MEMVIEW(&__pyx_v_view, 0); __pyx_v_self->_pattern_view = __pyx_v_view;
+2923: self._pattern_size = len(view)
__pyx_t_13 = __Pyx_MemoryView_Len(__pyx_v_view);
__pyx_v_self->_pattern_size = __pyx_t_13;
+2924: if self._pattern_size:
__pyx_t_1 = (__pyx_v_self->_pattern_size != 0); if (likely(__pyx_t_1)) { /* … */ goto __pyx_L16; }
2925: with cython.boundscheck(False):
+2926: self._pattern_data = &view[0]
__pyx_t_14 = 0; if (__pyx_t_14 < 0) __pyx_t_14 += __pyx_v_view.shape[0]; __pyx_v_self->_pattern_data = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_view.data + __pyx_t_14 * __pyx_v_view.strides[0]) ))));
+2927: if not forward:
__pyx_t_1 = ((!(__pyx_v_forward != 0)) != 0); if (__pyx_t_1) { /* … */ }
+2928: self._pattern_offset = self._pattern_size - 1
__pyx_v_self->_pattern_offset = (__pyx_v_self->_pattern_size - 1);
2929: else:
+2930: raise ValueError('non-empty pattern required')
/*else*/ { __pyx_t_11 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 2930, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_Raise(__pyx_t_11, 0, 0, 0); __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; __PYX_ERR(0, 2930, __pyx_L1_error) } __pyx_L16:; } __pyx_L7:; /* … */ __pyx_tuple__15 = PyTuple_Pack(1, __pyx_kp_u_non_empty_pattern_required); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 2930, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__15); __Pyx_GIVEREF(__pyx_tuple__15);
2931:
+2932: self._forward = forward
__pyx_v_self->_forward = __pyx_v_forward;
+2933: self._infinite = infinite
__pyx_v_self->_infinite = __pyx_v_infinite;
+2934: self._start = start
__pyx_v_self->_start = __pyx_v_start;
+2935: self._endex = endex
__pyx_v_self->_endex = __pyx_v_endex;
+2936: self._address = start if forward else endex
if ((__pyx_v_forward != 0)) { __pyx_t_15 = __pyx_v_start; } else { __pyx_t_15 = __pyx_v_endex; } __pyx_v_self->_address = __pyx_t_15;
2937:
+2938: self._memory = memory # keep reference
__Pyx_INCREF(((PyObject *)__pyx_v_memory)); __Pyx_GIVEREF(((PyObject *)__pyx_v_memory)); __Pyx_GOTREF(__pyx_v_self->_memory); __Pyx_DECREF(((PyObject *)__pyx_v_self->_memory)); __pyx_v_self->_memory = __pyx_v_memory;
+2939: self._blocks = memory._
__pyx_t_16 = __pyx_v_memory->_; __pyx_v_self->_blocks = __pyx_t_16;
+2940: self._block_count = Rack_Length(self._blocks)
__pyx_v_self->_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_blocks);
2941:
+2942: if self._block_count:
__pyx_t_1 = (__pyx_v_self->_block_count != 0); if (__pyx_t_1) { /* … */ }
+2943: if forward:
__pyx_t_1 = (__pyx_v_forward != 0); if (__pyx_t_1) { /* … */ goto __pyx_L19; }
+2944: self._block_index = Rack_IndexStart(self._blocks, start)
__pyx_t_14 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_self->_blocks, __pyx_v_start); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 2944, __pyx_L1_error)
__pyx_v_self->_block_index = __pyx_t_14;
+2945: if self._block_index < self._block_count:
__pyx_t_1 = ((__pyx_v_self->_block_index < __pyx_v_self->_block_count) != 0); if (__pyx_t_1) { /* … */ }
+2946: block = Rack_Get_(self._blocks, self._block_index)
__pyx_t_17 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_self->_blocks, __pyx_v_self->_block_index); if (unlikely(__pyx_t_17 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 2946, __pyx_L1_error) __pyx_v_block = __pyx_t_17;
+2947: self._block_start = Block_Start(block)
__pyx_v_self->_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+2948: self._block_endex = Block_Endex(block)
__pyx_v_self->_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
2949:
+2950: offset = start if start >= self._block_start else self._block_start
if (((__pyx_v_start >= __pyx_v_self->_block_start) != 0)) { __pyx_t_15 = __pyx_v_start; } else { __pyx_t_15 = __pyx_v_self->_block_start; } __pyx_v_offset = __pyx_t_15;
+2951: if offset > self._block_endex:
__pyx_t_1 = ((__pyx_v_offset > __pyx_v_self->_block_endex) != 0); if (__pyx_t_1) { /* … */ }
+2952: offset = self._block_endex
__pyx_t_15 = __pyx_v_self->_block_endex; __pyx_v_offset = __pyx_t_15;
+2953: offset -= self._block_start
__pyx_v_offset = (__pyx_v_offset - __pyx_v_self->_block_start);
+2954: CheckAddrToSizeU(offset)
__pyx_t_18 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_offset); if (unlikely(__pyx_t_18 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2954, __pyx_L1_error)
2955:
+2956: block = Block_Acquire(block)
__pyx_t_17 = __pyx_f_10bytesparse_2_c_Block_Acquire(__pyx_v_block); if (unlikely(__pyx_t_17 == ((Block_ *)NULL))) __PYX_ERR(0, 2956, __pyx_L1_error)
__pyx_v_block = __pyx_t_17;
+2957: self._block = block
__pyx_v_self->_block = __pyx_v_block;
+2958: self._block_ptr = Block_At__(block, <size_t>offset)
__pyx_v_self->_block_ptr = __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_block, ((size_t)__pyx_v_offset));
2959:
2960: else:
+2961: self._block_index = Rack_IndexEndex(self._blocks, endex)
/*else*/ {
__pyx_t_14 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_self->_blocks, __pyx_v_endex); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 2961, __pyx_L1_error)
__pyx_v_self->_block_index = __pyx_t_14;
+2962: if self._block_index:
__pyx_t_1 = (__pyx_v_self->_block_index != 0); if (__pyx_t_1) { /* … */ } } __pyx_L19:;
+2963: block = Rack_Get_(self._blocks, self._block_index - 1)
__pyx_t_17 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_self->_blocks, (__pyx_v_self->_block_index - 1)); if (unlikely(__pyx_t_17 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 2963, __pyx_L1_error) __pyx_v_block = __pyx_t_17;
+2964: self._block_start = Block_Start(block)
__pyx_v_self->_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+2965: self._block_endex = Block_Endex(block)
__pyx_v_self->_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
2966:
+2967: offset = endex if endex >= self._block_start else self._block_start
if (((__pyx_v_endex >= __pyx_v_self->_block_start) != 0)) { __pyx_t_15 = __pyx_v_endex; } else { __pyx_t_15 = __pyx_v_self->_block_start; } __pyx_v_offset = __pyx_t_15;
+2968: if offset > self._block_endex:
__pyx_t_1 = ((__pyx_v_offset > __pyx_v_self->_block_endex) != 0); if (__pyx_t_1) { /* … */ }
+2969: offset = self._block_endex
__pyx_t_15 = __pyx_v_self->_block_endex; __pyx_v_offset = __pyx_t_15;
+2970: offset -= self._block_start
__pyx_v_offset = (__pyx_v_offset - __pyx_v_self->_block_start);
+2971: CheckAddrToSizeU(offset)
__pyx_t_18 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_offset); if (unlikely(__pyx_t_18 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 2971, __pyx_L1_error)
2972:
+2973: block = Block_Acquire(block)
__pyx_t_17 = __pyx_f_10bytesparse_2_c_Block_Acquire(__pyx_v_block); if (unlikely(__pyx_t_17 == ((Block_ *)NULL))) __PYX_ERR(0, 2973, __pyx_L1_error)
__pyx_v_block = __pyx_t_17;
+2974: self._block = block
__pyx_v_self->_block = __pyx_v_block;
+2975: self._block_ptr = Block_At__(block, <size_t>offset)
__pyx_v_self->_block_ptr = __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_block, ((size_t)__pyx_v_offset));
2976:
+2977: def __len__(self):
/* Python wrapper */ static Py_ssize_t __pyx_pw_10bytesparse_2_c_5Rover_7__len__(PyObject *__pyx_v_self); /*proto*/ static Py_ssize_t __pyx_pw_10bytesparse_2_c_5Rover_7__len__(PyObject *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_6__len__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static Py_ssize_t __pyx_pf_10bytesparse_2_c_5Rover_6__len__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__len__", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+2978: return self._endex - self._start
__pyx_r = (__pyx_v_self->_endex - __pyx_v_self->_start); goto __pyx_L0;
2979:
+2980: cdef int next_(self) except -2:
static int __pyx_f_10bytesparse_2_c_5Rover_next_(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { Block_ *__pyx_v_block; int __pyx_v_value; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("next_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("bytesparse._c.Rover.next_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
2981: cdef:
2982: Block_* block
+2983: int value = -1
__pyx_v_value = -1;
2984:
+2985: try:
{ /*try:*/ { /* … */ } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L1_error; __pyx_L7_try_return:; __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L0; }
+2986: if self._forward:
__pyx_t_4 = (__pyx_v_self->_forward != 0); if (__pyx_t_4) { /* … */ goto __pyx_L9; }
+2987: while True: # loop to move to the next block when necessary
while (1) {
+2988: if self._address < self._endex:
__pyx_t_4 = ((__pyx_v_self->_address < __pyx_v_self->_endex) != 0); if (__pyx_t_4) { /* … */ }
+2989: if self._block_index < self._block_count:
__pyx_t_4 = ((__pyx_v_self->_block_index < __pyx_v_self->_block_count) != 0); if (__pyx_t_4) { /* … */ }
+2990: if self._address < self._block_start:
__pyx_t_4 = ((__pyx_v_self->_address < __pyx_v_self->_block_start) != 0); if (__pyx_t_4) { /* … */ }
+2991: self._address += 1
__pyx_v_self->_address = (__pyx_v_self->_address + 1);
+2992: if self._pattern_size:
__pyx_t_4 = (__pyx_v_self->_pattern_size != 0); if (__pyx_t_4) { /* … */ goto __pyx_L15; }
+2993: value = <int><unsigned>self._pattern_data[self._pattern_offset]
__pyx_v_value = ((int)((unsigned int)(__pyx_v_self->_pattern_data[__pyx_v_self->_pattern_offset])));
2994: else:
+2995: value = -1
/*else*/ { __pyx_v_value = -1; } __pyx_L15:;
+2996: break
goto __pyx_L11_break;
2997:
+2998: elif self._address < self._block_endex:
__pyx_t_4 = ((__pyx_v_self->_address < __pyx_v_self->_block_endex) != 0); if (__pyx_t_4) { /* … */ }
+2999: self._address += 1
__pyx_v_self->_address = (__pyx_v_self->_address + 1);
+3000: value = self._block_ptr[0]
__pyx_v_value = (__pyx_v_self->_block_ptr[0]);
+3001: self._block_ptr += 1
__pyx_v_self->_block_ptr = (__pyx_v_self->_block_ptr + 1);
+3002: break
goto __pyx_L11_break;
3003:
3004: else:
+3005: self._block_index += 1
/*else*/ { __pyx_v_self->_block_index = (__pyx_v_self->_block_index + 1);
+3006: if self._block_index < self._block_count:
__pyx_t_4 = ((__pyx_v_self->_block_index < __pyx_v_self->_block_count) != 0); if (__pyx_t_4) { /* … */ }
+3007: self._block = Block_Release(self._block)
__pyx_v_self->_block = __pyx_f_10bytesparse_2_c_Block_Release(__pyx_v_self->_block);
+3008: self._block = NULL
__pyx_v_self->_block = NULL;
+3009: block = Rack_Get_(self._blocks, self._block_index)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_self->_blocks, __pyx_v_self->_block_index); if (unlikely(__pyx_t_5 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 3009, __pyx_L3_error) __pyx_v_block = __pyx_t_5;
+3010: block = Block_Acquire(block)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Acquire(__pyx_v_block); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 3010, __pyx_L3_error)
__pyx_v_block = __pyx_t_5;
+3011: self._block = block
__pyx_v_self->_block = __pyx_v_block;
+3012: self._block_start = Block_Start(block)
__pyx_v_self->_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+3013: self._block_endex = Block_Endex(block)
__pyx_v_self->_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+3014: self._block_ptr = Block_At_(block, 0)
__pyx_v_self->_block_ptr = __pyx_f_10bytesparse_2_c_Block_At_(__pyx_v_block, 0);
+3015: continue
goto __pyx_L10_continue; }
3016: else:
+3017: self._address += 1
/*else*/ { __pyx_v_self->_address = (__pyx_v_self->_address + 1);
+3018: if self._pattern_size:
__pyx_t_4 = (__pyx_v_self->_pattern_size != 0); if (__pyx_t_4) { /* … */ goto __pyx_L17; }
+3019: value = <int><unsigned>self._pattern_data[self._pattern_offset]
__pyx_v_value = ((int)((unsigned int)(__pyx_v_self->_pattern_data[__pyx_v_self->_pattern_offset])));
3020: else:
+3021: value = -1
/*else*/ { __pyx_v_value = -1; } __pyx_L17:;
+3022: break
goto __pyx_L11_break; }
3023:
+3024: elif self._infinite:
__pyx_t_4 = (__pyx_v_self->_infinite != 0); if (likely(__pyx_t_4)) { /* … */ goto __pyx_L12; }
+3025: if self._pattern_size:
__pyx_t_4 = (__pyx_v_self->_pattern_size != 0); if (__pyx_t_4) { /* … */ goto __pyx_L18; }
+3026: value = <int><unsigned>self._pattern_data[self._pattern_offset]
__pyx_v_value = ((int)((unsigned int)(__pyx_v_self->_pattern_data[__pyx_v_self->_pattern_offset])));
3027: else:
+3028: value = -1
/*else*/ { __pyx_v_value = -1; } __pyx_L18:;
3029:
3030: else:
+3031: raise StopIteration()
/*else*/ { __pyx_t_6 = __Pyx_PyObject_CallNoArg(__pyx_builtin_StopIteration); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 3031, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 3031, __pyx_L3_error) } __pyx_L12:; __pyx_L10_continue:; } __pyx_L11_break:;
3032: else:
+3033: while True: # loop to move to the next block when necessary
/*else*/ { while (1) {
+3034: if self._address > self._start:
__pyx_t_4 = ((__pyx_v_self->_address > __pyx_v_self->_start) != 0); if (__pyx_t_4) { /* … */ }
+3035: if self._block_index:
__pyx_t_4 = (__pyx_v_self->_block_index != 0); if (__pyx_t_4) { /* … */ }
+3036: if self._address > self._block_endex:
__pyx_t_4 = ((__pyx_v_self->_address > __pyx_v_self->_block_endex) != 0); if (__pyx_t_4) { /* … */ }
+3037: self._address -= 1
__pyx_v_self->_address = (__pyx_v_self->_address - 1);
+3038: if self._pattern_size:
__pyx_t_4 = (__pyx_v_self->_pattern_size != 0); if (__pyx_t_4) { /* … */ goto __pyx_L24; }
+3039: value = <int><unsigned>self._pattern_data[self._pattern_offset]
__pyx_v_value = ((int)((unsigned int)(__pyx_v_self->_pattern_data[__pyx_v_self->_pattern_offset])));
3040: else:
+3041: value = -1
/*else*/ { __pyx_v_value = -1; } __pyx_L24:;
+3042: break
goto __pyx_L20_break;
3043:
+3044: elif self._address > self._block_start:
__pyx_t_4 = ((__pyx_v_self->_address > __pyx_v_self->_block_start) != 0); if (__pyx_t_4) { /* … */ }
+3045: self._address -= 1
__pyx_v_self->_address = (__pyx_v_self->_address - 1);
+3046: self._block_ptr -= 1
__pyx_v_self->_block_ptr = (__pyx_v_self->_block_ptr - 1);
+3047: value = self._block_ptr[0]
__pyx_v_value = (__pyx_v_self->_block_ptr[0]);
+3048: break
goto __pyx_L20_break;
3049:
3050: else:
+3051: self._block_index -= 1
/*else*/ { __pyx_v_self->_block_index = (__pyx_v_self->_block_index - 1);
+3052: if self._block_index:
__pyx_t_4 = (__pyx_v_self->_block_index != 0); if (__pyx_t_4) { /* … */ }
+3053: self._block = Block_Release(self._block)
__pyx_v_self->_block = __pyx_f_10bytesparse_2_c_Block_Release(__pyx_v_self->_block);
+3054: self._block = NULL
__pyx_v_self->_block = NULL;
+3055: block = Rack_Get_(self._blocks, self._block_index - 1)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_self->_blocks, (__pyx_v_self->_block_index - 1)); if (unlikely(__pyx_t_5 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 3055, __pyx_L3_error) __pyx_v_block = __pyx_t_5;
+3056: block = Block_Acquire(block)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Acquire(__pyx_v_block); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 3056, __pyx_L3_error)
__pyx_v_block = __pyx_t_5;
+3057: self._block = block
__pyx_v_self->_block = __pyx_v_block;
+3058: self._block_start = Block_Start(block)
__pyx_v_self->_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+3059: self._block_endex = Block_Endex(block)
__pyx_v_self->_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+3060: self._block_ptr = Block_At__(block, Block_Length(block))
__pyx_v_self->_block_ptr = __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_block, __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block));
+3061: value = -1
__pyx_v_value = -1;
+3062: continue
goto __pyx_L19_continue; }
3063: else:
+3064: self._address -= 1
/*else*/ { __pyx_v_self->_address = (__pyx_v_self->_address - 1);
+3065: if self._pattern_size:
__pyx_t_4 = (__pyx_v_self->_pattern_size != 0); if (__pyx_t_4) { /* … */ goto __pyx_L26; }
+3066: value = <int><unsigned>self._pattern_data[self._pattern_offset]
__pyx_v_value = ((int)((unsigned int)(__pyx_v_self->_pattern_data[__pyx_v_self->_pattern_offset])));
3067: else:
+3068: value = -1
/*else*/ { __pyx_v_value = -1; } __pyx_L26:;
+3069: break
goto __pyx_L20_break; }
3070:
+3071: elif self._infinite:
__pyx_t_4 = (__pyx_v_self->_infinite != 0); if (likely(__pyx_t_4)) { /* … */ goto __pyx_L21; }
+3072: if self._pattern_size:
__pyx_t_4 = (__pyx_v_self->_pattern_size != 0); if (__pyx_t_4) { /* … */ goto __pyx_L27; }
+3073: value = <int><unsigned>self._pattern_data[self._pattern_offset]
__pyx_v_value = ((int)((unsigned int)(__pyx_v_self->_pattern_data[__pyx_v_self->_pattern_offset])));
3074: else:
+3075: value = -1
/*else*/ { __pyx_v_value = -1; } __pyx_L27:;
3076:
3077: else:
+3078: raise StopIteration()
/*else*/ { __pyx_t_6 = __Pyx_PyObject_CallNoArg(__pyx_builtin_StopIteration); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 3078, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 3078, __pyx_L3_error) } __pyx_L21:; __pyx_L19_continue:; } __pyx_L20_break:; } __pyx_L9:;
3079:
+3080: if self._pattern_size:
__pyx_t_4 = (__pyx_v_self->_pattern_size != 0); if (__pyx_t_4) { /* … */ }
+3081: if self._forward:
__pyx_t_4 = (__pyx_v_self->_forward != 0); if (__pyx_t_4) { /* … */ goto __pyx_L29; }
+3082: if self._pattern_offset < self._pattern_size - 1:
__pyx_t_4 = ((__pyx_v_self->_pattern_offset < (__pyx_v_self->_pattern_size - 1)) != 0); if (__pyx_t_4) { /* … */ goto __pyx_L30; }
+3083: self._pattern_offset += 1
__pyx_v_self->_pattern_offset = (__pyx_v_self->_pattern_offset + 1);
3084: else:
+3085: self._pattern_offset = 0
/*else*/ { __pyx_v_self->_pattern_offset = 0; } __pyx_L30:;
3086: else:
+3087: if self._pattern_offset > 0:
/*else*/ { __pyx_t_4 = ((__pyx_v_self->_pattern_offset > 0) != 0); if (__pyx_t_4) { /* … */ goto __pyx_L31; }
+3088: self._pattern_offset -= 1
__pyx_v_self->_pattern_offset = (__pyx_v_self->_pattern_offset - 1);
3089: else:
+3090: self._pattern_offset = self._pattern_size - 1
/*else*/ { __pyx_v_self->_pattern_offset = (__pyx_v_self->_pattern_size - 1); } __pyx_L31:; } __pyx_L29:;
3091:
+3092: return value
__pyx_r = __pyx_v_value; goto __pyx_L7_try_return;
3093:
+3094: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Rover.next_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 3094, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); __Pyx_GOTREF(__pyx_t_8);
+3095: self._block = Block_Release(self._block) # preempt
__pyx_v_self->_block = __pyx_f_10bytesparse_2_c_Block_Release(__pyx_v_self->_block);
+3096: raise
__Pyx_GIVEREF(__pyx_t_6); __Pyx_GIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_8); __Pyx_ErrRestoreWithState(__pyx_t_6, __pyx_t_7, __pyx_t_8); __pyx_t_6 = 0; __pyx_t_7 = 0; __pyx_t_8 = 0; __PYX_ERR(0, 3096, __pyx_L5_except_error) } __pyx_L5_except_error:;
3097:
+3098: def __next__(self):
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_9__next__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_9__next__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__next__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_8__next__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_8__next__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { int __pyx_v_value; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__next__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Rover.__next__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_5Rover_12generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */
3099: cdef:
3100: int value
3101:
+3102: value = self.next_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Rover *)__pyx_v_self->__pyx_vtab)->next_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-2))) __PYX_ERR(0, 3102, __pyx_L1_error)
__pyx_v_value = __pyx_t_1;
+3103: return None if value < 0 else value
__Pyx_XDECREF(__pyx_r); if (((__pyx_v_value < 0) != 0)) { __Pyx_INCREF(Py_None); __pyx_t_2 = Py_None; } else { __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3103, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __pyx_t_3; __pyx_t_3 = 0; } __pyx_r = __pyx_t_2; __pyx_t_2 = 0; goto __pyx_L0;
3104:
+3105: def __iter__(self):
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_11__iter__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_11__iter__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_10__iter__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_10__iter__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct____iter__ *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iter__", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct____iter__ *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct____iter__(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct____iter__, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct____iter__ *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 3105, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_5Rover_12generator, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_iter, __pyx_n_s_Rover___iter, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 3105, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rover.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); 
__Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_5Rover_12generator(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iter__", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 3105, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct____iter__ { PyObject_HEAD struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self; int __pyx_v_value; };
3106: cdef:
3107: int value
3108:
+3109: while True:
while (1) {
+3110: value = self.next_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Rover *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->next_(__pyx_cur_scope->__pyx_v_self); if (unlikely(__pyx_t_1 == ((int)-2))) __PYX_ERR(0, 3110, __pyx_L1_error)
__pyx_cur_scope->__pyx_v_value = __pyx_t_1;
+3111: yield None if value < 0 else value
if (((__pyx_cur_scope->__pyx_v_value < 0) != 0)) { __Pyx_INCREF(Py_None); __pyx_t_2 = Py_None; } else { __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_cur_scope->__pyx_v_value); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3111, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __pyx_t_3; __pyx_t_3 = 0; } __pyx_r = __pyx_t_2; __pyx_t_2 = 0; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L6_resume_from_yield:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 3111, __pyx_L1_error) } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
3112:
+3113: cdef vint dispose_(self) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_5Rover_dispose_(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("dispose_", 0); /* … */ /* function exit code */ __pyx_r = 0; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3114: self._address = self._endex if self._forward else self._start
if ((__pyx_v_self->_forward != 0)) { __pyx_t_1 = __pyx_v_self->_endex; } else { __pyx_t_1 = __pyx_v_self->_start; } __pyx_v_self->_address = __pyx_t_1;
+3115: self._block = Block_Release(self._block)
__pyx_v_self->_block = __pyx_f_10bytesparse_2_c_Block_Release(__pyx_v_self->_block);
+3116: self._memory = None
__Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_memory); __Pyx_DECREF(((PyObject *)__pyx_v_self->_memory)); __pyx_v_self->_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)Py_None);
3117:
+3118: def dispose(self):
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_14dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_5Rover_13dispose[] = "Rover.dispose(self)"; static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_14dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("dispose (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_13dispose(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_13dispose(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("dispose", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Rover.dispose", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3119: self.dispose_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Rover *)__pyx_v_self->__pyx_vtab)->dispose_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 3119, __pyx_L1_error)
3120:
3121: @property
+3122: def forward(self) -> bool:
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_7forward_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_7forward_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_7forward___get__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_7forward___get__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Rover.forward.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3123: return self._forward
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->_forward); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3123, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
3124:
3125: @property
+3126: def infinite(self) -> bool:
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_8infinite_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_8infinite_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_8infinite___get__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_8infinite___get__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Rover.infinite.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3127: return self._infinite
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->_infinite); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3127, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
3128:
3129: @property
+3130: def address(self) -> Address:
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_7address_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_7address_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_7address___get__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_7address___get__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Rover.address.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3131: return self._address
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_address); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3131, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
3132:
3133: @property
+3134: def start(self) -> Address:
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_5start_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_5start_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_5start___get__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_5start___get__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Rover.start.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3135: return self._start
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_start); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3135, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
3136:
3137: @property
+3138: def endex(self) -> Address:
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_5endex_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_5Rover_5endex_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_5Rover_5endex___get__(((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_5Rover_5endex___get__(struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Rover.endex.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3139: return self._endex
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_endex); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3139, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
3140:
3141:
3142: # ---------------------------------------------------------------------------------------------------------------------
3143:
+3144: cdef class Memory:
struct __pyx_vtabstruct_10bytesparse_2_c_Memory { int (*__pyx___eq__same_)(struct __pyx_obj_10bytesparse_2_c_Memory *, struct __pyx_obj_10bytesparse_2_c_Memory *); int (*__pyx___eq__raw_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *); int (*__pyx___eq__view_)(struct __pyx_obj_10bytesparse_2_c_Memory *, __Pyx_memviewslice); int (*__pyx___eq__iter_)(struct __pyx_obj_10bytesparse_2_c_Memory *, PyObject *); saddr_t (*find_unbounded_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *); saddr_t (*find_bounded_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *, addr_t, addr_t); saddr_t (*rfind_unbounded_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *); saddr_t (*rfind_bounded_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *, addr_t, addr_t); addr_t (*count_unbounded_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *); addr_t (*count_bounded_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *, addr_t, addr_t); __pyx_t_10bytesparse_2_c_vint (*append_)(struct __pyx_obj_10bytesparse_2_c_Memory *, byte_t); __pyx_t_10bytesparse_2_c_vint (*extend_same_)(struct __pyx_obj_10bytesparse_2_c_Memory *, struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t); __pyx_t_10bytesparse_2_c_vint (*extend_raw_)(struct __pyx_obj_10bytesparse_2_c_Memory *, size_t, byte_t const *, addr_t); int (*pop_last_)(struct __pyx_obj_10bytesparse_2_c_Memory *); int (*pop_at_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t); addr_t (*start_)(struct __pyx_obj_10bytesparse_2_c_Memory *); addr_t (*endex_)(struct __pyx_obj_10bytesparse_2_c_Memory *); __pyx_ctuple_addr_t__and_addr_t (*span_)(struct __pyx_obj_10bytesparse_2_c_Memory *); addr_t (*content_start_)(struct __pyx_obj_10bytesparse_2_c_Memory *); addr_t (*content_endex_)(struct __pyx_obj_10bytesparse_2_c_Memory *); __pyx_ctuple_addr_t__and_addr_t (*content_span_)(struct __pyx_obj_10bytesparse_2_c_Memory *); addr_t 
(*content_size_)(struct __pyx_obj_10bytesparse_2_c_Memory *); size_t (*content_parts_)(struct __pyx_obj_10bytesparse_2_c_Memory *); __pyx_t_10bytesparse_2_c_vint (*validate_)(struct __pyx_obj_10bytesparse_2_c_Memory *); __pyx_ctuple_addr_t__and_addr_t (*bound_)(struct __pyx_obj_10bytesparse_2_c_Memory *, PyObject *, PyObject *); int (*peek_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t); int (*poke_none_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t); __pyx_t_10bytesparse_2_c_vint (*poke_none__)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t); int (*poke_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, byte_t); struct __pyx_obj_10bytesparse_2_c_Memory *(*extract_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, size_t, byte_t const *, saddr_t, int); __pyx_t_10bytesparse_2_c_vint (*shift_left_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, PyObject *); __pyx_t_10bytesparse_2_c_vint (*shift_right_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, PyObject *); __pyx_t_10bytesparse_2_c_vint (*reserve_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, PyObject *); struct __pyx_obj_10bytesparse_2_c_BlockView *(*_memview)(struct __pyx_obj_10bytesparse_2_c_Memory *); struct __pyx_obj_10bytesparse_2_c_Memory *(*copy_)(struct __pyx_obj_10bytesparse_2_c_Memory *); int (*_insert_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, size_t, byte_t const *, int); int (*_erase_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, int, int); __pyx_t_10bytesparse_2_c_vint (*insert_same_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, struct __pyx_obj_10bytesparse_2_c_Memory *, PyObject *); __pyx_t_10bytesparse_2_c_vint (*insert_raw_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, size_t, byte_t const *, PyObject *); __pyx_t_10bytesparse_2_c_vint (*delete_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, PyObject *); __pyx_t_10bytesparse_2_c_vint (*clear_)(struct __pyx_obj_10bytesparse_2_c_Memory *, 
addr_t, addr_t, PyObject *); __pyx_t_10bytesparse_2_c_vint (*_pretrim_start_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, PyObject *); __pyx_t_10bytesparse_2_c_vint (*_pretrim_endex_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, PyObject *); __pyx_t_10bytesparse_2_c_vint (*_crop_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, PyObject *); __pyx_t_10bytesparse_2_c_vint (*write_same_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, struct __pyx_obj_10bytesparse_2_c_Memory *, int, PyObject *); __pyx_t_10bytesparse_2_c_vint (*write_raw_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, size_t, byte_t const *, PyObject *); __pyx_t_10bytesparse_2_c_vint (*fill_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, Block_ **, PyObject *, addr_t); __pyx_t_10bytesparse_2_c_vint (*flood_)(struct __pyx_obj_10bytesparse_2_c_Memory *, addr_t, addr_t, Block_ **, PyObject *); }; static struct __pyx_vtabstruct_10bytesparse_2_c_Memory *__pyx_vtabptr_10bytesparse_2_c_Memory;
3145: r"""Virtual memory.
3146:
3147: This class is a handy wrapper around `blocks`, so that it can behave mostly
3148: like a :obj:`bytearray`, but on sparse chunks of data.
3149:
3150: Please look at examples of each method to get a glimpse of the features of
3151: this class.
3152:
3153: On creation, at most one of `memory`, `blocks`, or `data` can be specified.
3154:
3155: The Cython implementation limits the address range to that of the integral
3156: type ``uint_fast64_t``.
3157:
3158: Arguments:
3159: memory (Memory):
3160: An optional :obj:`Memory` to copy data from.
3161:
3162: data (bytes):
3163: An optional :obj:`bytes` string to create a single block of data.
3164:
3165: offset (int):
3166: Start address of the initial block, built if `data` is given.
3167:
3168: blocks (list of blocks):
3169: A sequence of non-overlapping blocks, sorted by address.
3170:
3171: start (int):
3172: Optional memory start address.
3173: Anything before will be trimmed away.
3174:
3175: endex (int):
3176: Optional memory exclusive end address.
3177: Anything at or after it will be trimmed away.
3178:
3179: copy (bool):
3180: Forces copy of provided input data.
3181:
3182: validate (bool):
3183: Validates the resulting :obj:`Memory` object.
3184:
3185: Raises:
3186: :obj:`ValueError`: More than one of `memory`, `data`, and `blocks`.
3187:
3188: Examples:
3189: >>> memory = Memory()
3190: >>> memory._blocks
3191: []
3192:
3193: >>> memory = Memory(data=b'Hello, World!', offset=5)
3194: >>> memory._blocks
3195: [[5, b'Hello, World!']]
3196: """
3197:
+3198: def __cinit__(self):
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_10bytesparse_2_c_6Memory_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory___cinit__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory___cinit__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__cinit__", 0); /* … */ /* function exit code */ __pyx_r = 0; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3199: r"""Cython constructor."""
+3200: self._ = NULL
__pyx_v_self->_ = NULL;
+3201: self._trim_start = 0
__pyx_v_self->_trim_start = 0;
+3202: self._trim_endex = ADDR_MAX
__pyx_v_self->_trim_endex = ADDR_MAX;
3203:
+3204: def __dealloc__(self):
/* Python wrapper */ static void __pyx_pw_10bytesparse_2_c_6Memory_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ static void __pyx_pw_10bytesparse_2_c_6Memory_3__dealloc__(PyObject *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); __pyx_pf_10bytesparse_2_c_6Memory_2__dealloc__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); } static void __pyx_pf_10bytesparse_2_c_6Memory_2__dealloc__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__", 0); /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); }
3205: r"""Cython deallocation method."""
+3206: self._ = Rack_Free(self._)
__pyx_v_self->_ = __pyx_f_10bytesparse_2_c_Rack_Free(__pyx_v_self->_);
3207:
+3208: def __init__(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_5__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_10bytesparse_2_c_6Memory_5__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_memory = 0; PyObject *__pyx_v_data = 0; PyObject *__pyx_v_offset = 0; PyObject *__pyx_v_blocks = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_copy = 0; PyObject *__pyx_v_validate = 0; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_memory,&__pyx_n_s_data,&__pyx_n_s_offset,&__pyx_n_s_blocks,&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_copy,&__pyx_n_s_validate,0}; PyObject* values[8] = {0,0,0,0,0,0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_4__init__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_memory, PyObject *__pyx_v_data, PyObject *__pyx_v_offset, PyObject *__pyx_v_blocks, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_copy, PyObject *__pyx_v_validate) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory_ = 0; addr_t __pyx_v_start_; addr_t __pyx_v_endex_; addr_t __pyx_v_address; size_t __pyx_v_size; __Pyx_memviewslice __pyx_v_view = { 0, 0, { 0 }, { 0 }, { 0 } }; byte_t const *__pyx_v_ptr; Block_ *__pyx_v_block; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__init__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_5); __PYX_XDEC_MEMVIEW(&__pyx_t_11, 1); __Pyx_XDECREF(__pyx_t_17); __Pyx_XDECREF(__pyx_t_18); __Pyx_AddTraceback("bytesparse._c.Memory.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_memory_); __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); 
__Pyx_RefNannyFinishContext(); return __pyx_r; }
3209: self: 'Memory',
+3210: memory: Optional['Memory'] = None,
values[0] = ((PyObject *)Py_None);
+3211: data: Optional[AnyBytes] = None,
values[1] = ((PyObject *)Py_None); values[2] = ((PyObject *)__pyx_int_0);
3212: offset: Address = 0,
+3213: blocks: Optional[BlockList] = None,
values[3] = ((PyObject *)Py_None);
+3214: start: Optional[Address] = None,
values[4] = ((PyObject *)Py_None);
+3215: endex: Optional[Address] = None,
values[5] = ((PyObject *)Py_None);
+3216: copy: bool = True,
values[6] = ((PyObject *)Py_True);
+3217: validate: bool = True,
values[7] = ((PyObject *)Py_True); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); CYTHON_FALLTHROUGH; case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); CYTHON_FALLTHROUGH; case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); CYTHON_FALLTHROUGH; case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_memory); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset); if (value) { values[2] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_blocks); if (value) { values[3] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 4: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[4] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 5: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[5] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 6: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_copy); if (value) { values[6] = value; kw_args--; } } 
CYTHON_FALLTHROUGH; case 7: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_validate); if (value) { values[7] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 3208, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); CYTHON_FALLTHROUGH; case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); CYTHON_FALLTHROUGH; case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); CYTHON_FALLTHROUGH; case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_memory = values[0]; __pyx_v_data = values[1]; __pyx_v_offset = values[2]; __pyx_v_blocks = values[3]; __pyx_v_start = values[4]; __pyx_v_endex = values[5]; __pyx_v_copy = values[6]; __pyx_v_validate = values[7]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 8, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3208, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return -1; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_4__init__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_memory, __pyx_v_data, __pyx_v_offset, __pyx_v_blocks, __pyx_v_start, __pyx_v_endex, __pyx_v_copy, __pyx_v_validate);
3218: ):
3219: cdef:
3220: Memory memory_
3221: addr_t start_
3222: addr_t endex_
3223: addr_t address
3224: size_t size
3225: const byte_t[:] view
+3226: const byte_t* ptr = NULL
__pyx_v_ptr = NULL;
+3227: Block_* block = NULL
__pyx_v_block = NULL;
3228:
+3229: if (memory is not None) + (data is not None) + (blocks is not None) > 1:
__pyx_t_1 = (__pyx_v_memory != Py_None); __pyx_t_2 = (__pyx_v_data != Py_None); __pyx_t_3 = (__pyx_v_blocks != Py_None); __pyx_t_4 = ((((__pyx_t_1 + __pyx_t_2) + __pyx_t_3) > 1) != 0); if (unlikely(__pyx_t_4)) { /* … */ }
+3230: raise ValueError('only one of [memory, data, blocks] is allowed')
__pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__18, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3230, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 3230, __pyx_L1_error) /* … */ __pyx_tuple__18 = PyTuple_Pack(1, __pyx_kp_u_only_one_of_memory_data_blocks_i); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 3230, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__18); __Pyx_GIVEREF(__pyx_tuple__18);
3231:
+3232: start_ = 0 if start is None else <addr_t>start
__pyx_t_4 = (__pyx_v_start == Py_None); if ((__pyx_t_4 != 0)) { __pyx_t_6 = 0; } else { __pyx_t_7 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_start); if (unlikely((__pyx_t_7 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3232, __pyx_L1_error) __pyx_t_6 = ((addr_t)__pyx_t_7); } __pyx_v_start_ = __pyx_t_6;
+3233: endex_ = ADDR_MAX if endex is None else <addr_t>endex
__pyx_t_4 = (__pyx_v_endex == Py_None); if ((__pyx_t_4 != 0)) { __pyx_t_6 = ADDR_MAX; } else { __pyx_t_7 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_endex); if (unlikely((__pyx_t_7 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3233, __pyx_L1_error) __pyx_t_6 = ((addr_t)__pyx_t_7); } __pyx_v_endex_ = __pyx_t_6;
+3234: if endex_ < start_:
__pyx_t_4 = ((__pyx_v_endex_ < __pyx_v_start_) != 0); if (__pyx_t_4) { /* … */ }
+3235: endex_ = start_ # clamp negative length
__pyx_v_endex_ = __pyx_v_start_;
3236:
+3237: if memory is not None:
__pyx_t_4 = (__pyx_v_memory != Py_None); __pyx_t_3 = (__pyx_t_4 != 0); if (__pyx_t_3) { /* … */ goto __pyx_L5; }
+3238: memory_ = <Memory>memory
__pyx_t_5 = __pyx_v_memory;
__Pyx_INCREF(__pyx_t_5);
__pyx_v_memory_ = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_5);
__pyx_t_5 = 0;
3239:
+3240: if copy or offset:
__pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_copy); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 3240, __pyx_L1_error) if (!__pyx_t_4) { } else { __pyx_t_3 = __pyx_t_4; goto __pyx_L7_bool_binop_done; } __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_offset); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 3240, __pyx_L1_error) __pyx_t_3 = __pyx_t_4; __pyx_L7_bool_binop_done:; if (__pyx_t_3) { /* … */ goto __pyx_L6; }
+3241: self._ = Rack_Copy(memory_._)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Copy(__pyx_v_memory_->_); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 3241, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_8;
+3242: self._ = Rack_Shift(self._, offset)
__pyx_t_9 = __Pyx_PyInt_As_int_fast64_t(__pyx_v_offset); if (unlikely((__pyx_t_9 == ((saddr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3242, __pyx_L1_error) __pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Shift(__pyx_v_self->_, __pyx_t_9); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 3242, __pyx_L1_error) __pyx_v_self->_ = __pyx_t_8;
3243: else:
+3244: self._ = Rack_ShallowCopy(memory_._)
/*else*/ {
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_ShallowCopy(__pyx_v_memory_->_); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 3244, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_8;
}
__pyx_L6:;
3245:
+3246: elif data is not None:
__pyx_t_3 = (__pyx_v_data != Py_None); __pyx_t_4 = (__pyx_t_3 != 0); if (__pyx_t_4) { /* … */ goto __pyx_L5; }
+3247: if offset < 0:
__pyx_t_5 = PyObject_RichCompare(__pyx_v_offset, __pyx_int_0, Py_LT); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3247, __pyx_L1_error) __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 3247, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; if (unlikely(__pyx_t_4)) { /* … */ }
+3248: raise ValueError('negative offset')
__pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__19, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3248, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 3248, __pyx_L1_error) /* … */ __pyx_tuple__19 = PyTuple_Pack(1, __pyx_kp_u_negative_offset); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 3248, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__19); __Pyx_GIVEREF(__pyx_tuple__19);
3249:
+3250: address = <addr_t>offset
__pyx_t_6 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_offset); if (unlikely((__pyx_t_6 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3250, __pyx_L1_error) __pyx_v_address = ((addr_t)__pyx_t_6);
+3251: size = <size_t>len(data)
__pyx_t_10 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_10 == ((Py_ssize_t)-1))) __PYX_ERR(0, 3251, __pyx_L1_error) __pyx_v_size = ((size_t)__pyx_t_10);
+3252: self._ = Rack_Alloc(0)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Alloc(0); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 3252, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_8;
3253:
+3254: if size:
__pyx_t_4 = (__pyx_v_size != 0); if (__pyx_t_4) { /* … */ }
+3255: view = data
__pyx_t_11 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_data, 0); if (unlikely(!__pyx_t_11.memview)) __PYX_ERR(0, 3255, __pyx_L1_error) __pyx_v_view = __pyx_t_11; __pyx_t_11.memview = NULL; __pyx_t_11.data = NULL;
3256: with cython.boundscheck(False):
+3257: ptr = &view[0]
__pyx_t_12 = 0; if (__pyx_t_12 < 0) __pyx_t_12 += __pyx_v_view.shape[0]; __pyx_v_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_view.data + __pyx_t_12 * __pyx_v_view.strides[0]) ))));
+3258: block = Block_Create(address, size, ptr)
__pyx_t_13 = __pyx_f_10bytesparse_2_c_Block_Create(__pyx_v_address, __pyx_v_size, __pyx_v_ptr); if (unlikely(__pyx_t_13 == ((Block_ *)NULL))) __PYX_ERR(0, 3258, __pyx_L1_error)
__pyx_v_block = __pyx_t_13;
+3259: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; goto __pyx_L16_try_end; __pyx_L11_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_11, 1); __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_14); __Pyx_XGIVEREF(__pyx_t_15); __Pyx_XGIVEREF(__pyx_t_16); __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); goto __pyx_L1_error; __pyx_L16_try_end:; }
+3260: self._ = Rack_Append(self._, block)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Append(__pyx_v_self->_, __pyx_v_block); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 3260, __pyx_L11_error)
__pyx_v_self->_ = __pyx_t_8;
+3261: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_17, &__pyx_t_18) < 0) __PYX_ERR(0, 3261, __pyx_L13_except_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GOTREF(__pyx_t_17); __Pyx_GOTREF(__pyx_t_18);
+3262: block = Block_Free(block)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block);
+3263: raise
__Pyx_GIVEREF(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_17); __Pyx_XGIVEREF(__pyx_t_18); __Pyx_ErrRestoreWithState(__pyx_t_5, __pyx_t_17, __pyx_t_18); __pyx_t_5 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; __PYX_ERR(0, 3263, __pyx_L13_except_error) } __pyx_L13_except_error:;
3264:
+3265: elif blocks:
__pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_blocks); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 3265, __pyx_L1_error) if (__pyx_t_4) { /* … */ goto __pyx_L5; }
+3266: self._ = Rack_FromObject(blocks, offset)
__pyx_t_9 = __Pyx_PyInt_As_int_fast64_t(__pyx_v_offset); if (unlikely((__pyx_t_9 == ((saddr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3266, __pyx_L1_error) __pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_FromObject(__pyx_v_blocks, __pyx_t_9); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 3266, __pyx_L1_error) __pyx_v_self->_ = __pyx_t_8;
3267:
3268: else:
+3269: self._ = Rack_Alloc(0)
/*else*/ {
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Rack_Alloc(0); if (unlikely(__pyx_t_8 == ((Rack_ *)NULL))) __PYX_ERR(0, 3269, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_8;
}
__pyx_L5:;
3270:
+3271: self._trim_start = start_
__pyx_v_self->_trim_start = __pyx_v_start_;
+3272: self._trim_endex = endex_
__pyx_v_self->_trim_endex = __pyx_v_endex_;
+3273: self._trim_start_ = start is not None
__pyx_t_4 = (__pyx_v_start != Py_None); __pyx_v_self->_trim_start_ = __pyx_t_4;
+3274: self._trim_endex_ = endex is not None
__pyx_t_4 = (__pyx_v_endex != Py_None); __pyx_v_self->_trim_endex_ = __pyx_t_4;
3275:
+3276: self._crop_(start_, endex_, None)
__pyx_t_19 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, ((PyObject*)Py_None)); if (unlikely(__pyx_t_19 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 3276, __pyx_L1_error)
3277:
+3278: if validate:
__pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_validate); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 3278, __pyx_L1_error) if (__pyx_t_4) { /* … */ }
+3279: self.validate()
__pyx_t_17 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_validate); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 3279, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_17); __pyx_t_5 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_17))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_17); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_17); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_17, function); } } __pyx_t_18 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_17, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_17); __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; if (unlikely(!__pyx_t_18)) __PYX_ERR(0, 3279, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_18); __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; __Pyx_DECREF(__pyx_t_18); __pyx_t_18 = 0;
3280:
+3281: def __repr__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_7__repr__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_7__repr__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_6__repr__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_6__repr__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { addr_t __pyx_v_start; addr_t __pyx_v_endex; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__repr__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Memory.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3282: self: 'Memory',
3283: ) -> str:
3284: cdef:
+3285: addr_t start = self.start_()
__pyx_v_start = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self);
+3286: addr_t endex = self.endex_()
__pyx_v_endex = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self);
3287:
+3288: return f'<{type(self).__name__}[0x{start:X}:0x{endex:X}]@0x{id(self):X}>'
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = PyTuple_New(9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = 0; __pyx_t_3 = 127; __Pyx_INCREF(__pyx_kp_u__10); __pyx_t_2 += 1; __Pyx_GIVEREF(__pyx_kp_u__10); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_kp_u__10); __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))), __pyx_n_s_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyObject_FormatSimple(__pyx_t_4, __pyx_empty_unicode); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) : __pyx_t_3; __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_5); __pyx_t_5 = 0; __Pyx_INCREF(__pyx_kp_u_0x); __pyx_t_2 += 3; __Pyx_GIVEREF(__pyx_kp_u_0x); PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_kp_u_0x); __pyx_t_5 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_start); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_4 = __Pyx_PyObject_Format(__pyx_t_5, __pyx_n_u_X); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_4); __pyx_t_4 = 0; __Pyx_INCREF(__pyx_kp_u_0x_2); __pyx_t_2 += 3; __Pyx_GIVEREF(__pyx_kp_u_0x_2); PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_kp_u_0x_2); __pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_endex); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyObject_Format(__pyx_t_4, __pyx_n_u_X); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) > __pyx_t_3) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_5) : __pyx_t_3; __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_t_5); __pyx_t_5 = 0; __Pyx_INCREF(__pyx_kp_u_0x_3); __pyx_t_2 += 4; __Pyx_GIVEREF(__pyx_kp_u_0x_3); PyTuple_SET_ITEM(__pyx_t_1, 6, __pyx_kp_u_0x_3); __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_builtin_id, ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_4 = __Pyx_PyObject_Format(__pyx_t_5, __pyx_n_u_X); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_t_3 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) > __pyx_t_3) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_4) : __pyx_t_3; __pyx_t_2 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_1, 7, __pyx_t_4); __pyx_t_4 = 0; __Pyx_INCREF(__pyx_kp_u__11); __pyx_t_2 += 1; __Pyx_GIVEREF(__pyx_kp_u__11); PyTuple_SET_ITEM(__pyx_t_1, 8, __pyx_kp_u__11); __pyx_t_4 = __Pyx_PyUnicode_Join(__pyx_t_1, 9, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3288, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0;
3289:
+3290: def __str__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_9__str__(PyObject *__pyx_v_self); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_8__str__[] = "String representation.\n\n If :attr:`content_size` is lesser than ``STR_MAX_CONTENT_SIZE``, then\n the memory is represented as a list of blocks.\n\n If exceeding, it is equivalent to :meth:`__repr__`.\n\n\n Returns:\n str: String representation.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|\n +===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [7, b'xyz']])\n >>> memory._blocks\n 'ABCxyz'\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_8__str__; #endif static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_9__str__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__str__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_8__str__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_8__str__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { addr_t __pyx_v_size; addr_t __pyx_v_start; addr_t __pyx_v_endex; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__str__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.__str__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3291: self: 'Memory',
3292: ) -> str:
3293: r"""String representation.
3294:
3295: If :attr:`content_size` is lesser than ``STR_MAX_CONTENT_SIZE``, then
3296: the memory is represented as a list of blocks.
3297:
3298: If exceeding, it is equivalent to :meth:`__repr__`.
3299:
3300:
3301: Returns:
3302: str: String representation.
3303:
3304: Example:
3305: +---+---+---+---+---+---+---+---+---+---+---+
3306: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|
3307: +===+===+===+===+===+===+===+===+===+===+===+
3308: | |[A | B | C]| | | |[x | y | z]| |
3309: +---+---+---+---+---+---+---+---+---+---+---+
3310:
3311: >>> memory = Memory(blocks=[[1, b'ABC'], [7, b'xyz']])
3312: >>> memory._blocks
3313: 'ABCxyz'
3314: """
3315: cdef:
+3316: addr_t size = self.content_size_()
__pyx_v_size = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_size_(__pyx_v_self);
3317: addr_t start
3318: addr_t endex
3319:
+3320: if size > STR_MAX_CONTENT_SIZE:
__pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_size); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3320, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_STR_MAX_CONTENT_SIZE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3320, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_2, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3320, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 3320, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_4) { /* … */ }
+3321: start = self.start_()
__pyx_v_start = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self);
+3322: endex = self.endex_()
__pyx_v_endex = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self);
+3323: return f'<{type(self).__name__}[0x{start:X}:0x{endex:X}]@0x{id(self):X}>'
__Pyx_XDECREF(__pyx_r); __pyx_t_3 = PyTuple_New(9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_5 = 0; __pyx_t_6 = 127; __Pyx_INCREF(__pyx_kp_u__10); __pyx_t_5 += 1; __Pyx_GIVEREF(__pyx_kp_u__10); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_kp_u__10); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self))), __pyx_n_s_name); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = __Pyx_PyObject_FormatSimple(__pyx_t_2, __pyx_empty_unicode); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_6 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) > __pyx_t_6) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) : __pyx_t_6; __pyx_t_5 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1); __pyx_t_1 = 0; __Pyx_INCREF(__pyx_kp_u_0x); __pyx_t_5 += 3; __Pyx_GIVEREF(__pyx_kp_u_0x); PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_kp_u_0x); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_start); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_Format(__pyx_t_1, __pyx_n_u_X); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_6 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_2) > __pyx_t_6) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_2) : __pyx_t_6; __pyx_t_5 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_2); __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 3, __pyx_t_2); __pyx_t_2 = 0; __Pyx_INCREF(__pyx_kp_u_0x_2); __pyx_t_5 += 3; __Pyx_GIVEREF(__pyx_kp_u_0x_2); PyTuple_SET_ITEM(__pyx_t_3, 4, __pyx_kp_u_0x_2); __pyx_t_2 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_endex); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = __Pyx_PyObject_Format(__pyx_t_2, __pyx_n_u_X); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_6 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) > __pyx_t_6) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_1) : __pyx_t_6; __pyx_t_5 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_3, 5, __pyx_t_1); __pyx_t_1 = 0; __Pyx_INCREF(__pyx_kp_u_0x_3); __pyx_t_5 += 4; __Pyx_GIVEREF(__pyx_kp_u_0x_3); PyTuple_SET_ITEM(__pyx_t_3, 6, __pyx_kp_u_0x_3); __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_id, ((PyObject *)__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_Format(__pyx_t_1, __pyx_n_u_X); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_6 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_2) > __pyx_t_6) ? 
__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_2) : __pyx_t_6; __pyx_t_5 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_2); __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 7, __pyx_t_2); __pyx_t_2 = 0; __Pyx_INCREF(__pyx_kp_u__11); __pyx_t_5 += 1; __Pyx_GIVEREF(__pyx_kp_u__11); PyTuple_SET_ITEM(__pyx_t_3, 8, __pyx_kp_u__11); __pyx_t_2 = __Pyx_PyUnicode_Join(__pyx_t_3, 9, __pyx_t_5, __pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_2; __pyx_t_2 = 0; goto __pyx_L0;
3324:
3325: else:
+3326: return str(self._to_blocks())
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_to_blocks); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3326, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_1 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_1)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_1); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } __pyx_t_2 = (__pyx_t_1) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_1) : __Pyx_PyObject_CallNoArg(__pyx_t_3); __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3326, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyUnicode_Type)), __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3326, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; }
3327:
+3328: def __bool__(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_11__bool__(PyObject *__pyx_v_self); /*proto*/ static int __pyx_pw_10bytesparse_2_c_6Memory_11__bool__(PyObject *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bool__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_10__bool__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_10__bool__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bool__", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3329: self: 'Memory',
3330: ) -> bool:
3331: r"""Has any items.
3332:
3333: Returns:
3334: bool: Has any items.
3335:
3336: Examples:
3337: >>> memory = Memory()
3338: >>> bool(memory)
3339: False
3340:
3341: >>> memory = Memory(data=b'Hello, World!', offset=5)
3342: >>> bool(memory)
3343: True
3344: """
3345:
+3346: return Rack_Length(self._) > 0
__pyx_r = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) > 0); goto __pyx_L0;
3347:
+3348: cdef bint __eq__same_(self, Memory other) except -1:
static int __pyx_f_10bytesparse_2_c_6Memory___eq__same_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_other) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__eq__same_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.__eq__same_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+3349: return Rack_Eq(self._, (<Memory>other)._)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Eq(__pyx_v_self->_, __pyx_v_other->_); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 3349, __pyx_L1_error)
__pyx_r = __pyx_t_1;
goto __pyx_L0;
3350:
+3351: cdef bint __eq__raw_(self, size_t data_size, const byte_t* data_ptr) except -1:
static int __pyx_f_10bytesparse_2_c_6Memory___eq__raw_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_data_size, byte_t const *__pyx_v_data_ptr) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_count; Block_ const *__pyx_v_block; size_t __pyx_v_size; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__eq__raw_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3352: cdef:
+3353: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
3354: size_t block_count
3355: const Block_* block
3356: size_t size
3357:
+3358: block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
+3359: if block_count:
__pyx_t_2 = (__pyx_v_block_count != 0); if (__pyx_t_2) { /* … */ }
+3360: if block_count != 1:
__pyx_t_2 = ((__pyx_v_block_count != 1) != 0); if (__pyx_t_2) { /* … */ }
+3361: return False
__pyx_r = 0; goto __pyx_L0;
3362:
+3363: block = Rack_First__(blocks)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_First__(__pyx_v_blocks);
+3364: size = Block_Length(block)
__pyx_v_size = __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block);
+3365: if data_size != size:
__pyx_t_2 = ((__pyx_v_data_size != __pyx_v_size) != 0); if (__pyx_t_2) { /* … */ }
+3366: return False
__pyx_r = 0; goto __pyx_L0;
3367:
+3368: if memcmp(Block_At__(block, 0), data_ptr, data_size):
__pyx_t_2 = (memcmp(__pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_block, 0), __pyx_v_data_ptr, __pyx_v_data_size) != 0); if (__pyx_t_2) { /* … */ }
+3369: return False
__pyx_r = 0; goto __pyx_L0;
+3370: return True
__pyx_r = 1; goto __pyx_L0;
3371: else:
+3372: return not data_size
/*else*/ { __pyx_r = (!(__pyx_v_data_size != 0)); goto __pyx_L0; }
3373:
+3374: cdef bint __eq__view_(self, const byte_t[:] view) except -1:
static int __pyx_f_10bytesparse_2_c_6Memory___eq__view_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, __Pyx_memviewslice __pyx_v_view) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__eq__view_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.__eq__view_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_11__eq__iter__2generator9(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */
3375: with cython.boundscheck(False):
+3376: return self.__eq__raw_(len(view), &view[0])
__pyx_t_1 = __Pyx_MemoryView_Len(__pyx_v_view); __pyx_t_2 = 0; if (__pyx_t_2 < 0) __pyx_t_2 += __pyx_v_view.shape[0]; __pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->__pyx___eq__raw_(__pyx_v_self, __pyx_t_1, (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_view.data + __pyx_t_2 * __pyx_v_view.strides[0]) ))))); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 3376, __pyx_L1_error) __pyx_r = __pyx_t_3; goto __pyx_L0;
3377:
+3378: cdef bint __eq__iter_(self, iterable) except -1:
static int __pyx_f_10bytesparse_2_c_6Memory___eq__iter_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_iterable) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_ *__pyx_cur_scope; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__eq__iter_", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_ *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_ *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 3378, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("bytesparse._c.Memory.__eq__iter_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_ { PyObject_HEAD PyObject *__pyx_v_iter_other; PyObject *__pyx_v_iter_self; };
+3379: iter_self = _islice(self, len(self)) # avoid infinite loop
__Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_islice); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3379, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = PyObject_Length(((PyObject *)__pyx_v_self)); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(0, 3379, __pyx_L1_error) __pyx_t_4 = PyInt_FromSsize_t(__pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3379, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = NULL; __pyx_t_6 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_6 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_5, ((PyObject *)__pyx_v_self), __pyx_t_4}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3379, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_5, ((PyObject *)__pyx_v_self), __pyx_t_4}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3379, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else #endif { __pyx_t_7 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 3379, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); if (__pyx_t_5) { __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; } __Pyx_INCREF(((PyObject *)__pyx_v_self)); __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, ((PyObject *)__pyx_v_self)); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_7, 
1+__pyx_t_6, __pyx_t_4); __pyx_t_4 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3379, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_GIVEREF(__pyx_t_1); __pyx_cur_scope->__pyx_v_iter_self = __pyx_t_1; __pyx_t_1 = 0;
+3380: iter_other = iter(iterable)
__pyx_t_1 = PyObject_GetIter(__pyx_v_iterable); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3380, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __pyx_cur_scope->__pyx_v_iter_other = __pyx_t_1; __pyx_t_1 = 0;
+3381: return all(a == b for a, b in _zip_longest(iter_self, iter_other, fillvalue=None))
static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_11__eq__iter__genexpr(PyObject *__pyx_self) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_2_genexpr *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("genexpr", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_2_genexpr *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_2_genexpr(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_2_genexpr, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_2_genexpr *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 3381, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_outer_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_ *) __pyx_self; __Pyx_INCREF(((PyObject *)__pyx_cur_scope->__pyx_outer_scope)); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_outer_scope); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_11__eq__iter__2generator9, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_genexpr, __pyx_n_s_Memory___eq__iter__locals_genexp, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.__eq__iter_.genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_11__eq__iter__2generator9(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("genexpr", 0); __pyx_L3_first_run:; if 
(unlikely(!__pyx_sent_value)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_zip_longest); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (unlikely(!__pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_self)) { __Pyx_RaiseClosureNameError("iter_self"); __PYX_ERR(0, 3381, __pyx_L1_error) } if (unlikely(!__pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_other)) { __Pyx_RaiseClosureNameError("iter_other"); __PYX_ERR(0, 3381, __pyx_L1_error) } __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_self); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_self); PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_self); __Pyx_INCREF(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_other); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_other); PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_cur_scope->__pyx_outer_scope->__pyx_v_iter_other); __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_fillvalue, Py_None) < 0) __PYX_ERR(0, 3381, __pyx_L1_error) __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (likely(PyList_CheckExact(__pyx_t_4)) || PyTuple_CheckExact(__pyx_t_4)) { __pyx_t_3 = __pyx_t_4; __Pyx_INCREF(__pyx_t_3); __pyx_t_5 = 0; __pyx_t_6 = NULL; } else { __pyx_t_5 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_6 = Py_TYPE(__pyx_t_3)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 3381, __pyx_L1_error) 
} __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; for (;;) { if (likely(!__pyx_t_6)) { if (likely(PyList_CheckExact(__pyx_t_3))) { if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_3)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_4 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 3381, __pyx_L1_error) #else __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); #endif } else { if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_3)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 3381, __pyx_L1_error) #else __pyx_t_4 = PySequence_ITEM(__pyx_t_3, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); #endif } } else { __pyx_t_4 = __pyx_t_6(__pyx_t_3); if (unlikely(!__pyx_t_4)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); else __PYX_ERR(0, 3381, __pyx_L1_error) } break; } __Pyx_GOTREF(__pyx_t_4); } if ((likely(PyTuple_CheckExact(__pyx_t_4))) || (PyList_CheckExact(__pyx_t_4))) { PyObject* sequence = __pyx_t_4; Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); if (unlikely(size != 2)) { if (size > 2) __Pyx_RaiseTooManyValuesError(2); else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); __PYX_ERR(0, 3381, __pyx_L1_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { __pyx_t_2 = PyTuple_GET_ITEM(sequence, 0); __pyx_t_1 = PyTuple_GET_ITEM(sequence, 1); } else { __pyx_t_2 = PyList_GET_ITEM(sequence, 0); __pyx_t_1 = PyList_GET_ITEM(sequence, 1); } __Pyx_INCREF(__pyx_t_2); __Pyx_INCREF(__pyx_t_1); #else __pyx_t_2 = PySequence_ITEM(sequence, 0); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); #endif __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else { Py_ssize_t index = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_8 = Py_TYPE(__pyx_t_7)->tp_iternext; index = 0; __pyx_t_2 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_2)) goto __pyx_L6_unpacking_failed; __Pyx_GOTREF(__pyx_t_2); index = 1; __pyx_t_1 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_1)) goto __pyx_L6_unpacking_failed; __Pyx_GOTREF(__pyx_t_1); if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_7), 2) < 0) __PYX_ERR(0, 3381, __pyx_L1_error) __pyx_t_8 = NULL; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; goto __pyx_L7_unpacking_done; __pyx_L6_unpacking_failed:; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_t_8 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); __PYX_ERR(0, 3381, __pyx_L1_error) __pyx_L7_unpacking_done:; } __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_a); __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_a, __pyx_t_2); __Pyx_GIVEREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_XGOTREF(__pyx_cur_scope->__pyx_v_b); __Pyx_XDECREF_SET(__pyx_cur_scope->__pyx_v_b, __pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_4 = PyObject_RichCompare(__pyx_cur_scope->__pyx_v_a, __pyx_cur_scope->__pyx_v_b, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3381, __pyx_L1_error) __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_10 = ((!__pyx_t_9) != 0); if (__pyx_t_10) { __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(Py_False); __pyx_r = Py_False; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; goto __pyx_L0; } } /*else*/ { __Pyx_XDECREF(__pyx_r); 
__Pyx_INCREF(Py_True); __pyx_r = Py_True; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; goto __pyx_L0; } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope); /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("genexpr", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ __pyx_t_1 = __pyx_pf_10bytesparse_2_c_6Memory_11__eq__iter__genexpr(((PyObject*)__pyx_cur_scope)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_Generator_Next(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely((__pyx_t_8 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 3381, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_8; goto __pyx_L0; /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_2_genexpr { PyObject_HEAD struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_1___pyx___eq__iter_ *__pyx_outer_scope; PyObject *__pyx_v_a; PyObject *__pyx_v_b; };
3382:
+3383: def __eq__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13__eq__(PyObject *__pyx_v_self, PyObject *__pyx_v_other); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_12__eq__[] = "Equality comparison.\n\n Arguments:\n other (Memory):\n Data to compare with `self`.\n\n If it is a :obj:`Memory`, all of its blocks must match.\n\n If it is a :obj:`list`, it is expected that it contains the\n same blocks as `self`.\n\n Otherwise, it must match the first stored block, considered\n equal if also starts at 0.\n\n Returns:\n bool: `self` is equal to `other`.\n\n Examples:\n >>> data = b'Hello, World!'\n >>> memory = Memory(data=data)\n >>> memory == data\n True\n >>> memory.shift(1)\n >>> memory == data\n False\n\n >>> data = b'Hello, World!'\n >>> memory = Memory(data=data)\n >>> memory == [[0, data]]\n True\n >>> memory == list(data)\n False\n >>> memory.shift(1)\n >>> memory == [[0, data]]\n False\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_12__eq__; #endif static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13__eq__(PyObject *__pyx_v_self, PyObject *__pyx_v_other) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__eq__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_12__eq__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_other)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_12__eq__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_other) { CYTHON_UNUSED __Pyx_memviewslice __pyx_v_view = { 0, 0, { 0 }, { 0 }, { 0 } }; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__eq__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __PYX_XDEC_MEMVIEW(&__pyx_t_7, 1); __PYX_XDEC_MEMVIEW(&__pyx_t_8, 1); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); 
__Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Memory.__eq__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_16generator1(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */
3384: self: 'Memory',
3385: other: Any,
3386: ) -> bool:
3387: r"""Equality comparison.
3388:
3389: Arguments:
3390: other (Memory):
3391: Data to compare with `self`.
3392:
3393: If it is a :obj:`Memory`, all of its blocks must match.
3394:
3395: If it is a :obj:`list`, it is expected that it contains the
3396: same blocks as `self`.
3397:
3398: Otherwise, it must match the first stored block, considered
3399: equal if also starts at 0.
3400:
3401: Returns:
3402: bool: `self` is equal to `other`.
3403:
3404: Examples:
3405: >>> data = b'Hello, World!'
3406: >>> memory = Memory(data=data)
3407: >>> memory == data
3408: True
3409: >>> memory.shift(1)
3410: >>> memory == data
3411: False
3412:
3413: >>> data = b'Hello, World!'
3414: >>> memory = Memory(data=data)
3415: >>> memory == [[0, data]]
3416: True
3417: >>> memory == list(data)
3418: False
3419: >>> memory.shift(1)
3420: >>> memory == [[0, data]]
3421: False
3422: """
3423: cdef:
3424: const byte_t[:] view
3425:
+3426: if isinstance(other, Memory):
__pyx_t_1 = __Pyx_TypeCheck(__pyx_v_other, __pyx_ptype_10bytesparse_2_c_Memory);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
}
+3427: return self.__eq__same_(other)
__Pyx_XDECREF(__pyx_r); if (!(likely(((__pyx_v_other) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_other, __pyx_ptype_10bytesparse_2_c_Memory))))) __PYX_ERR(0, 3427, __pyx_L1_error) __pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->__pyx___eq__same_(__pyx_v_self, ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_other)); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 3427, __pyx_L1_error) __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3427, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
3428: else:
+3429: try:
/*else*/ { { /*try:*/ { /* … */ } /* … */ __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); goto __pyx_L1_error; __pyx_L7_except_return:; __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); goto __pyx_L0; } }
+3430: view = other
__pyx_t_7 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_other, 0); if (unlikely(!__pyx_t_7.memview)) __PYX_ERR(0, 3430, __pyx_L4_error) __pyx_v_view = __pyx_t_7; __pyx_t_7.memview = NULL; __pyx_t_7.data = NULL;
+3431: except TypeError:
__pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_TypeError); if (__pyx_t_9) { __Pyx_AddTraceback("bytesparse._c.Memory.__eq__", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_3, &__pyx_t_10, &__pyx_t_11) < 0) __PYX_ERR(0, 3431, __pyx_L6_except_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_11);
+3432: return self.__eq__iter_(other)
__Pyx_XDECREF(__pyx_r); __pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->__pyx___eq__iter_(__pyx_v_self, __pyx_v_other); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 3432, __pyx_L6_except_error) __pyx_t_12 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 3432, __pyx_L6_except_error) __Pyx_GOTREF(__pyx_t_12); __pyx_r = __pyx_t_12; __pyx_t_12 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; goto __pyx_L7_except_return; } goto __pyx_L6_except_error; __pyx_L6_except_error:;
3433: else:
+3434: return self.__eq__view_(other)
/*else:*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_8 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_other, 0); if (unlikely(!__pyx_t_8.memview)) __PYX_ERR(0, 3434, __pyx_L6_except_error) __pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->__pyx___eq__view_(__pyx_v_self, __pyx_t_8); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 3434, __pyx_L6_except_error) __PYX_XDEC_MEMVIEW(&__pyx_t_8, 1); __pyx_t_8.memview = NULL; __pyx_t_8.data = NULL; __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 3434, __pyx_L6_except_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L7_except_return; } __pyx_L4_error:; __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_XDEC_MEMVIEW(&__pyx_t_7, 1);
3435:
+3436: def __iter__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_15__iter__(PyObject *__pyx_v_self); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_14__iter__[] = "Iterates over values.\n\n Iterates over values between :attr:`start` and :attr:`endex`.\n\n Yields:\n int: Value as byte integer, or ``None``.\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_14__iter__; #endif static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_15__iter__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_14__iter__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_14__iter__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_3___iter__ *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iter__", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_3___iter__ *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_3___iter__(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_3___iter__, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_3___iter__ *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 3436, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_16generator1, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_iter, __pyx_n_s_Memory___iter, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) 
__PYX_ERR(0, 3436, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_16generator1(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iter__", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 3436, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_19generator2(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_3___iter__ { PyObject_HEAD struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; };
3437: self: 'Memory',
3438: ) -> Iterator[Optional[Value]]:
3439: r"""Iterates over values.
3440:
3441: Iterates over values between :attr:`start` and :attr:`endex`.
3442:
3443: Yields:
3444: int: Value as byte integer, or ``None``.
3445: """
3446:
+3447: yield from self.values()
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_cur_scope->__pyx_v_self), __pyx_n_s_values); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3447, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3447, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __Pyx_Generator_Yield_From(__pyx_generator, __pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_XGOTREF(__pyx_r); if (likely(__pyx_r)) { __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L4_resume_from_yield_from:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 3447, __pyx_L1_error) } else { PyObject* exc_type = __Pyx_PyErr_Occurred(); if (exc_type) { if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit && __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear(); else __PYX_ERR(0, 3447, __pyx_L1_error) } } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
3448:
+3449: def __reversed__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_18__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_17__reversed__[] = "Memory.__reversed__(self: u'Memory') -> Iterator[Optional[Value]]\nIterates over values, reversed order.\n\n Iterates over values between :attr:`start` and :attr:`endex`, in\n reversed order.\n\n Yields:\n int: Value as byte integer, or ``None``.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_18__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__reversed__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_17__reversed__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_17__reversed__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_4___reversed__ *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__reversed__", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_4___reversed__ *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_4___reversed__(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_4___reversed__, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_4___reversed__ *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 3449, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_19generator2, NULL, (PyObject *) 
__pyx_cur_scope, __pyx_n_s_reversed, __pyx_n_s_Memory___reversed, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 3449, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.__reversed__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_19generator2(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__reversed__", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 3449, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("__reversed__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_4___reversed__ { PyObject_HEAD struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; };
3450: self: 'Memory',
3451: ) -> Iterator[Optional[Value]]:
3452: r"""Iterates over values, reversed order.
3453:
3454: Iterates over values between :attr:`start` and :attr:`endex`, in
3455: reversed order.
3456:
3457: Yields:
3458: int: Value as byte integer, or ``None``.
3459: """
3460:
+3461: yield from self.rvalues()
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_cur_scope->__pyx_v_self), __pyx_n_s_rvalues); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3461, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3) : __Pyx_PyObject_CallNoArg(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3461, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __Pyx_Generator_Yield_From(__pyx_generator, __pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_XGOTREF(__pyx_r); if (likely(__pyx_r)) { __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L4_resume_from_yield_from:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 3461, __pyx_L1_error) } else { PyObject* exc_type = __Pyx_PyErr_Occurred(); if (exc_type) { if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit && __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear(); else __PYX_ERR(0, 3461, __pyx_L1_error) } } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
3462:
+3463: def __add__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_21__add__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_21__add__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__add__ (wrapper)", 0); if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_self), __pyx_ptype_10bytesparse_2_c_Memory, 1, "self", 0))) __PYX_ERR(0, 3464, __pyx_L1_error) __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_20__add__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_value)); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_20__add__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_value) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__add__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.__add__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_memory); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3464: self: 'Memory',
3465: value: Union[AnyBytes, 'Memory'],
3466: ) -> 'Memory':
3467:
+3468: memory = self.copy_()
__pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->copy_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3468, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_1); __pyx_t_1 = 0;
+3469: memory.extend(value)
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_memory), __pyx_n_s_extend); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3469, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_value) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_value); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3469, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+3470: return memory
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(((PyObject *)__pyx_v_memory)); __pyx_r = ((PyObject *)__pyx_v_memory); goto __pyx_L0;
3471:
+3472: def __iadd__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iadd__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_22__iadd__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_value)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_22__iadd__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_value) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iadd__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.__iadd__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3473: self: 'Memory',
3474: value: Union[AnyBytes, 'Memory'],
3475: ) -> 'Memory':
3476:
+3477: self.extend(value)
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_extend); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3477, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_value) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_value); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3477, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+3478: return self
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(((PyObject *)__pyx_v_self)); __pyx_r = ((PyObject *)__pyx_v_self); goto __pyx_L0;
3479:
+3480: def __mul__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_25__mul__(PyObject *__pyx_v_self, PyObject *__pyx_v_times); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_25__mul__(PyObject *__pyx_v_self, PyObject *__pyx_v_times) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__mul__ (wrapper)", 0); if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_self), __pyx_ptype_10bytesparse_2_c_Memory, 1, "self", 0))) __PYX_ERR(0, 3481, __pyx_L1_error) __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_24__mul__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_times)); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_24__mul__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_times) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory = 0; addr_t __pyx_v_offset; PyObject *__pyx_v_start = NULL; PyObject *__pyx_v_size = NULL; CYTHON_UNUSED PyObject *__pyx_v_time = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__mul__", 0); __Pyx_INCREF(__pyx_v_times); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Memory.__mul__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_memory); __Pyx_XDECREF(__pyx_v_start); __Pyx_XDECREF(__pyx_v_size); __Pyx_XDECREF(__pyx_v_time); __Pyx_XDECREF(__pyx_v_times); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3481: self: 'Memory',
3482: times: int,
3483: ) -> 'Memory':
3484: cdef:
3485: Memory memory
3486: addr_t offset
3487:
+3488: times = int(times)
__pyx_t_1 = __Pyx_PyNumber_Int(__pyx_v_times); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3488, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF_SET(__pyx_v_times, __pyx_t_1); __pyx_t_1 = 0;
+3489: if times < 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_times, __pyx_int_0, Py_LT); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3489, __pyx_L1_error) __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 3489, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_2) { /* … */ }
+3490: times = 0
__Pyx_INCREF(__pyx_int_0); __Pyx_DECREF_SET(__pyx_v_times, __pyx_int_0);
3491:
+3492: if times and Rack_Length(self._):
__pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_times); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 3492, __pyx_L1_error) if (__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; } __pyx_t_3 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+3493: start = self.start
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_start); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3493, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_start = __pyx_t_1; __pyx_t_1 = 0;
+3494: size = self.endex - start
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_endex); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3494, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_4 = PyNumber_Subtract(__pyx_t_1, __pyx_v_start); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3494, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v_size = __pyx_t_4; __pyx_t_4 = 0;
+3495: offset = size # adjust first write
__pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_size); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3495, __pyx_L1_error) __pyx_v_offset = __pyx_t_5;
+3496: memory = self.__deepcopy__()
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_deepcopy); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3496, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_6 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); if (likely(__pyx_t_6)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); __Pyx_INCREF(__pyx_t_6); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_1, function); } } __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3496, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_10bytesparse_2_c_Memory))))) __PYX_ERR(0, 3496, __pyx_L1_error) __pyx_v_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_4); __pyx_t_4 = 0;
3497:
+3498: for time in range(times - 1):
__pyx_t_4 = __Pyx_PyInt_SubtractObjC(__pyx_v_times, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3498, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_range, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3498, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) { __pyx_t_4 = __pyx_t_1; __Pyx_INCREF(__pyx_t_4); __pyx_t_7 = 0; __pyx_t_8 = NULL; } else { __pyx_t_7 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3498, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_8 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3498, __pyx_L1_error) } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; for (;;) { if (likely(!__pyx_t_8)) { if (likely(PyList_CheckExact(__pyx_t_4))) { if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_4)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_1 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 3498, __pyx_L1_error) #else __pyx_t_1 = PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3498, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); #endif } else { if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_4)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 3498, __pyx_L1_error) #else __pyx_t_1 = PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3498, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); #endif } } else { __pyx_t_1 = __pyx_t_8(__pyx_t_4); if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); else __PYX_ERR(0, 3498, 
__pyx_L1_error) } break; } __Pyx_GOTREF(__pyx_t_1); } __Pyx_XDECREF_SET(__pyx_v_time, __pyx_t_1); __pyx_t_1 = 0; /* … */ } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+3499: memory.write_same_(offset, self, False, None)
__pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_memory->__pyx_vtab)->write_same_(__pyx_v_memory, __pyx_v_offset, __pyx_v_self, 0, ((PyObject*)Py_None)); if (unlikely(__pyx_t_9 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 3499, __pyx_L1_error)
+3500: offset += size
__pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3500, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_6 = PyNumber_InPlaceAdd(__pyx_t_1, __pyx_v_size); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 3500, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_t_6); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3500, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_v_offset = __pyx_t_5;
3501:
+3502: return memory
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(((PyObject *)__pyx_v_memory)); __pyx_r = ((PyObject *)__pyx_v_memory); goto __pyx_L0;
3503: else:
+3504: return Memory()
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_4 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10bytesparse_2_c_Memory)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3504, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0; }
3505:
+3506: def __imul__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_27__imul__(PyObject *__pyx_v_self, PyObject *__pyx_v_times); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_27__imul__(PyObject *__pyx_v_self, PyObject *__pyx_v_times) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__imul__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_26__imul__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_times)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_26__imul__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_times) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory = 0; addr_t __pyx_v_offset; PyObject *__pyx_v_start = NULL; PyObject *__pyx_v_size = NULL; CYTHON_UNUSED PyObject *__pyx_v_time = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__imul__", 0); __Pyx_INCREF(__pyx_v_times); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Memory.__imul__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_memory); __Pyx_XDECREF(__pyx_v_start); __Pyx_XDECREF(__pyx_v_size); __Pyx_XDECREF(__pyx_v_time); __Pyx_XDECREF(__pyx_v_times); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3507: self: 'Memory',
3508: times: int,
3509: ) -> 'Memory':
3510: cdef:
3511: Memory memory
3512: addr_t offset
3513:
+3514: times = int(times)
__pyx_t_1 = __Pyx_PyNumber_Int(__pyx_v_times); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3514, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF_SET(__pyx_v_times, __pyx_t_1); __pyx_t_1 = 0;
+3515: if times < 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_times, __pyx_int_0, Py_LT); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3515, __pyx_L1_error) __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 3515, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_2) { /* … */ }
+3516: times = 0
__Pyx_INCREF(__pyx_int_0); __Pyx_DECREF_SET(__pyx_v_times, __pyx_int_0);
3517:
+3518: if times and Rack_Length(self._):
__pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_times); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 3518, __pyx_L1_error) if (__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L5_bool_binop_done; } __pyx_t_3 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+3519: start = self.start
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_start); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3519, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_start = __pyx_t_1; __pyx_t_1 = 0;
+3520: size = self.endex - start
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_endex); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3520, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_4 = PyNumber_Subtract(__pyx_t_1, __pyx_v_start); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3520, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v_size = __pyx_t_4; __pyx_t_4 = 0;
+3521: offset = size
__pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_size); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3521, __pyx_L1_error) __pyx_v_offset = __pyx_t_5;
+3522: memory = self.__deepcopy__()
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_deepcopy); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3522, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_6 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); if (likely(__pyx_t_6)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); __Pyx_INCREF(__pyx_t_6); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_1, function); } } __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3522, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (!(likely(((__pyx_t_4) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_10bytesparse_2_c_Memory))))) __PYX_ERR(0, 3522, __pyx_L1_error) __pyx_v_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_4); __pyx_t_4 = 0;
3523:
+3524: for time in range(times - 1):
__pyx_t_4 = __Pyx_PyInt_SubtractObjC(__pyx_v_times, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3524, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_builtin_range, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3524, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) { __pyx_t_4 = __pyx_t_1; __Pyx_INCREF(__pyx_t_4); __pyx_t_7 = 0; __pyx_t_8 = NULL; } else { __pyx_t_7 = -1; __pyx_t_4 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 3524, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_8 = Py_TYPE(__pyx_t_4)->tp_iternext; if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3524, __pyx_L1_error) } __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; for (;;) { if (likely(!__pyx_t_8)) { if (likely(PyList_CheckExact(__pyx_t_4))) { if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_4)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_1 = PyList_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 3524, __pyx_L1_error) #else __pyx_t_1 = PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3524, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); #endif } else { if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_4)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_4, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; if (unlikely(0 < 0)) __PYX_ERR(0, 3524, __pyx_L1_error) #else __pyx_t_1 = PySequence_ITEM(__pyx_t_4, __pyx_t_7); __pyx_t_7++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3524, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); #endif } } else { __pyx_t_1 = __pyx_t_8(__pyx_t_4); if (unlikely(!__pyx_t_1)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); else __PYX_ERR(0, 3524, 
__pyx_L1_error) } break; } __Pyx_GOTREF(__pyx_t_1); } __Pyx_XDECREF_SET(__pyx_v_time, __pyx_t_1); __pyx_t_1 = 0; /* … */ } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+3525: self.write_same_(offset, memory, False, None)
__pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_same_(__pyx_v_self, __pyx_v_offset, __pyx_v_memory, 0, ((PyObject*)Py_None)); if (unlikely(__pyx_t_9 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 3525, __pyx_L1_error)
+3526: offset += size
__pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_6 = PyNumber_InPlaceAdd(__pyx_t_1, __pyx_v_size); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 3526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_t_6); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3526, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_v_offset = __pyx_t_5;
3527: else:
+3528: self._ = Rack_Clear(self._)
/*else*/ {
__pyx_t_10 = __pyx_f_10bytesparse_2_c_Rack_Clear(__pyx_v_self->_); if (unlikely(__pyx_t_10 == ((Rack_ *)NULL))) __PYX_ERR(0, 3528, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_10;
}
__pyx_L4:;
+3529: return self
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(((PyObject *)__pyx_v_self)); __pyx_r = ((PyObject *)__pyx_v_self); goto __pyx_L0;
3530:
+3531: def __len__(
/* Python wrapper */ static Py_ssize_t __pyx_pw_10bytesparse_2_c_6Memory_29__len__(PyObject *__pyx_v_self); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_28__len__[] = "Actual length.\n\n Computes the actual length of the stored items, i.e.\n (:attr:`endex` - :attr:`start`).\n This will consider any trimmings being active.\n\n Returns:\n int: Memory length.\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_28__len__; #endif static Py_ssize_t __pyx_pw_10bytesparse_2_c_6Memory_29__len__(PyObject *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_28__len__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static Py_ssize_t __pyx_pf_10bytesparse_2_c_6Memory_28__len__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__len__", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3532: self: 'Memory',
3533: ) -> Address:
3534: r"""Actual length.
3535:
3536: Computes the actual length of the stored items, i.e.
3537: (:attr:`endex` - :attr:`start`).
3538: This will consider any trimmings being active.
3539:
3540: Returns:
3541: int: Memory length.
3542: """
3543:
+3544: return self.endex_() - self.start_()
__pyx_r = (((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self) - ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self)); goto __pyx_L0;
3545:
+3546: def ofind(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_31ofind(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_30ofind[] = "Memory.ofind(self: u'Memory', item: Union[AnyBytes, Value], start: Optional[Address] = None, endex: Optional[Address] = None) -> Optional[Address]\nIndex of an item.\n\n Arguments:\n item (items):\n Value to find. Can be either some byte string or an integer.\n\n start (int):\n Inclusive start of the searched range.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n Returns:\n int: The index of the first item equal to `value`, or ``None``.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_31ofind(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_item = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("ofind (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_item,&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_30ofind(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { PyObject *__pyx_v_offset = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("ofind", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Memory.ofind", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_offset); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3547: self: 'Memory',
3548: item: Union[AnyBytes, Value],
+3549: start: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+3550: endex: Optional[Address] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "ofind") < 0)) __PYX_ERR(0, 3546, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_item = values[0]; __pyx_v_start = values[1]; __pyx_v_endex = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("ofind", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3546, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.ofind", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_30ofind(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), 
__pyx_v_item, __pyx_v_start, __pyx_v_endex);
3551: ) -> Optional[Address]:
3552: r"""Index of an item.
3553:
3554: Arguments:
3555: item (items):
3556: Value to find. Can be either some byte string or an integer.
3557:
3558: start (int):
3559: Inclusive start of the searched range.
3560: If ``None``, :attr:`start` is considered.
3561:
3562: endex (int):
3563: Exclusive end of the searched range.
3564: If ``None``, :attr:`endex` is considered.
3565:
3566: Returns:
3567: int: The index of the first item equal to `value`, or ``None``.
3568: """
3569:
+3570: offset = self.find(item, start, endex)
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_find); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3570, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_4 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3570, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3570, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_5 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3570, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); if (__pyx_t_3) { __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; } __Pyx_INCREF(__pyx_v_item); __Pyx_GIVEREF(__pyx_v_item); PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_item); __Pyx_INCREF(__pyx_v_start); __Pyx_GIVEREF(__pyx_v_start); PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_start); __Pyx_INCREF(__pyx_v_endex); __Pyx_GIVEREF(__pyx_v_endex); PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_endex); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3570, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); 
__pyx_t_5 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v_offset = __pyx_t_1; __pyx_t_1 = 0;
+3571: if offset >= 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_offset, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3571, __pyx_L1_error) __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 3571, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_6) { /* … */ }
+3572: return offset
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_offset); __pyx_r = __pyx_v_offset; goto __pyx_L0;
3573: else:
+3574: return None
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; }
3575:
+3576: def rofind(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_33rofind(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_32rofind[] = "Memory.rofind(self: u'Memory', item: Union[AnyBytes, Value], start: Optional[Address] = None, endex: Optional[Address] = None) -> Optional[Address]\nIndex of an item, reversed search.\n\n Arguments:\n item (items):\n Value to find. Can be either some byte string or an integer.\n\n start (int):\n Inclusive start of the searched range.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n Returns:\n int: The index of the last item equal to `value`, or ``None``.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_33rofind(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_item = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rofind (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_item,&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_32rofind(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { PyObject *__pyx_v_offset = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rofind", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Memory.rofind", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_offset); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return 
__pyx_r; }
3577: self: 'Memory',
3578: item: Union[AnyBytes, Value],
+3579: start: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+3580: endex: Optional[Address] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "rofind") < 0)) __PYX_ERR(0, 3576, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_item = values[0]; __pyx_v_start = values[1]; __pyx_v_endex = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("rofind", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3576, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.rofind", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_32rofind(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), 
__pyx_v_item, __pyx_v_start, __pyx_v_endex);
3581: ) -> Optional[Address]:
3582: r"""Index of an item, reversed search.
3583:
3584: Arguments:
3585: item (items):
3586: Value to find. Can be either some byte string or an integer.
3587:
3588: start (int):
3589: Inclusive start of the searched range.
3590: If ``None``, :attr:`start` is considered.
3591:
3592: endex (int):
3593: Exclusive end of the searched range.
3594: If ``None``, :attr:`endex` is considered.
3595:
3596: Returns:
3597: int: The index of the last item equal to `value`, or ``None``.
3598: """
3599:
+3600: offset = self.rfind(item, start, endex)
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_rfind); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3600, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_4 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3600, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3600, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_5 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3600, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); if (__pyx_t_3) { __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; } __Pyx_INCREF(__pyx_v_item); __Pyx_GIVEREF(__pyx_v_item); PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_item); __Pyx_INCREF(__pyx_v_start); __Pyx_GIVEREF(__pyx_v_start); PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_start); __Pyx_INCREF(__pyx_v_endex); __Pyx_GIVEREF(__pyx_v_endex); PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_endex); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3600, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); 
__pyx_t_5 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v_offset = __pyx_t_1; __pyx_t_1 = 0;
+3601: if offset >= 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_offset, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3601, __pyx_L1_error) __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 3601, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_6) { /* … */ }
+3602: return offset
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_offset); __pyx_r = __pyx_v_offset; goto __pyx_L0;
3603: else:
+3604: return None
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; }
3605:
+3606: cdef saddr_t find_unbounded_(self, size_t size, const byte_t* buffer) except -2:
static saddr_t __pyx_f_10bytesparse_2_c_6Memory_find_unbounded_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; Py_ssize_t __pyx_v_offset; saddr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("find_unbounded_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3607: cdef:
+3608: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
3609: size_t block_index
3610: const Block_* block
3611: ssize_t offset
3612:
+3613: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+3614: for block_index in range(Rack_Length(blocks)):
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_block_index = __pyx_t_5;
+3615: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+3616: offset = Block_Find_(block, 0, SIZE_MAX, size, buffer)
__pyx_v_offset = __pyx_f_10bytesparse_2_c_Block_Find_(__pyx_v_block, 0, SIZE_MAX, __pyx_v_size, __pyx_v_buffer);
+3617: if offset >= 0:
__pyx_t_2 = ((__pyx_v_offset >= 0) != 0); if (__pyx_t_2) { /* … */ } }
+3618: return Block_Start(block) + <size_t>offset
__pyx_r = (__pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block) + ((size_t)__pyx_v_offset)); goto __pyx_L0;
+3619: return -1
__pyx_r = -1L; goto __pyx_L0;
3620:
+3621: cdef saddr_t find_bounded_(self, size_t size, const byte_t* buffer, addr_t start, addr_t endex) except -2:
static saddr_t __pyx_f_10bytesparse_2_c_6Memory_find_bounded_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_size, byte_t const *__pyx_v_buffer, addr_t __pyx_v_start, addr_t __pyx_v_endex) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; Py_ssize_t __pyx_v_offset; size_t __pyx_v_block_index_start; size_t __pyx_v_block_index_endex; size_t __pyx_v_slice_start; size_t __pyx_v_slice_endex; saddr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("find_bounded_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.find_bounded_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2L; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3622: cdef:
+3623: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
3624: size_t block_index
3625: const Block_* block
3626: ssize_t offset
3627: size_t block_index_start
3628: size_t block_index_endex
3629: size_t slice_start
3630: size_t slice_endex
3631:
+3632: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+3633: if endex < start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_2) { /* … */ }
+3634: endex = start
__pyx_v_endex = __pyx_v_start;
+3635: block_index_start = Rack_IndexStart(blocks, start)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_start); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 3635, __pyx_L1_error)
__pyx_v_block_index_start = __pyx_t_3;
+3636: block_index_endex = Rack_IndexEndex(blocks, endex)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_blocks, __pyx_v_endex); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 3636, __pyx_L1_error)
__pyx_v_block_index_endex = __pyx_t_3;
3637:
+3638: for block_index in range(block_index_start, block_index_endex):
__pyx_t_4 = __pyx_v_block_index_endex; __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_block_index_start; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_block_index = __pyx_t_6;
+3639: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+3640: slice_start, slice_endex = Block_BoundAddressSliceToOffset(block, start, endex)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_Block_BoundAddressSliceToOffset(__pyx_v_block, __pyx_v_start, __pyx_v_endex); __pyx_t_8 = __pyx_t_7.f0; __pyx_t_9 = __pyx_t_7.f1; __pyx_v_slice_start = __pyx_t_8; __pyx_v_slice_endex = __pyx_t_9;
+3641: offset = Block_Find_(block, slice_start, slice_endex, size, buffer)
__pyx_v_offset = __pyx_f_10bytesparse_2_c_Block_Find_(__pyx_v_block, __pyx_v_slice_start, __pyx_v_slice_endex, __pyx_v_size, __pyx_v_buffer);
+3642: if offset >= 0:
__pyx_t_2 = ((__pyx_v_offset >= 0) != 0); if (__pyx_t_2) { /* … */ } }
+3643: return Block_Start(block) + <size_t>offset
__pyx_r = (__pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block) + ((size_t)__pyx_v_offset)); goto __pyx_L0;
+3644: return -1
__pyx_r = -1L; goto __pyx_L0;
3645:
+3646: def find(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_35find(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_34find[] = "Memory.find(self: u'Memory', item: Union[AnyBytes, Value], start: Optional[Address] = None, endex: Optional[Address] = None) -> Address\nIndex of an item.\n\n Arguments:\n item (items):\n Value to find. Can be either some byte string or an integer.\n\n start (int):\n Inclusive start of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n endex (int):\n Exclusive end of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n Returns:\n int: The index of the first item equal to `value`, or -1.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_35find(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_item = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("find (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_item,&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_34find(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; byte_t __pyx_v_item_value; __Pyx_memviewslice __pyx_v_item_view = { 0, 0, { 0 }, { 0 }, { 0 } }; size_t __pyx_v_item_size; byte_t const *__pyx_v_item_ptr; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("find", 0); /* … */ /* function exit code */ __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_4, 1); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("bytesparse._c.Memory.find", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; 
__PYX_XDEC_MEMVIEW(&__pyx_v_item_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3647: self: 'Memory',
3648: item: Union[AnyBytes, Value],
+3649: start: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+3650: endex: Optional[Address] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "find") < 0)) __PYX_ERR(0, 3646, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_item = values[0]; __pyx_v_start = values[1]; __pyx_v_endex = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("find", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3646, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.find", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_34find(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), 
__pyx_v_item, __pyx_v_start, __pyx_v_endex);
3651: ) -> Address:
3652: r"""Index of an item.
3653:
3654: Arguments:
3655: item (items):
3656: Value to find. Can be either some byte string or an integer.
3657:
3658: start (int):
3659: Inclusive start of the searched range.
3660: If ``None``, :attr:`endex` is considered.
3661:
3662: endex (int):
3663: Exclusive end of the searched range.
3664: If ``None``, :attr:`endex` is considered.
3665:
3666: Returns:
3667: int: The index of the first item equal to `value`, or -1.
3668: """
3669: cdef:
3670: addr_t start_
3671: addr_t endex_
3672: byte_t item_value
3673: const byte_t[:] item_view
3674: size_t item_size
3675: const byte_t* item_ptr
3676:
+3677: if isinstance(item, int):
__pyx_t_1 = PyInt_Check(__pyx_v_item);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L3;
}
+3678: item_value = <byte_t>item
__pyx_t_3 = __Pyx_PyInt_As_byte_t(__pyx_v_item); if (unlikely((__pyx_t_3 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3678, __pyx_L1_error) __pyx_v_item_value = ((byte_t)__pyx_t_3);
+3679: item_size = 1
__pyx_v_item_size = 1;
+3680: item_ptr = &item_value
__pyx_v_item_ptr = (&__pyx_v_item_value);
3681: else:
+3682: item_view = item
/*else*/ { __pyx_t_4 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_item, 0); if (unlikely(!__pyx_t_4.memview)) __PYX_ERR(0, 3682, __pyx_L1_error) __pyx_v_item_view = __pyx_t_4; __pyx_t_4.memview = NULL; __pyx_t_4.data = NULL;
+3683: item_size = 1
__pyx_v_item_size = 1;
3684: with cython.boundscheck(False):
+3685: item_ptr = &item_view[0]
__pyx_t_5 = 0; if (__pyx_t_5 < 0) __pyx_t_5 += __pyx_v_item_view.shape[0]; __pyx_v_item_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_item_view.data + __pyx_t_5 * __pyx_v_item_view.strides[0]) )))); } __pyx_L3:;
3686:
3687: # Faster code for unbounded slice
+3688: if start is None and endex is None:
__pyx_t_1 = (__pyx_v_start == Py_None); __pyx_t_6 = (__pyx_t_1 != 0); if (__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L5_bool_binop_done; } __pyx_t_6 = (__pyx_v_endex == Py_None); __pyx_t_1 = (__pyx_t_6 != 0); __pyx_t_2 = __pyx_t_1; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+3689: return self.find_unbounded_(item_size, item_ptr)
__Pyx_XDECREF(__pyx_r); __pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->find_unbounded_(__pyx_v_self, __pyx_v_item_size, __pyx_v_item_ptr); if (unlikely(__pyx_t_7 == ((saddr_t)-2L))) __PYX_ERR(0, 3689, __pyx_L1_error) __pyx_t_8 = __Pyx_PyInt_From_int_fast64_t(__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3689, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
3690:
3691: # Bounded slice
+3692: start_, endex_ = self.bound_(start, endex)
__pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_10 = __pyx_t_9.f0; __pyx_t_11 = __pyx_t_9.f1; __pyx_v_start_ = __pyx_t_10; __pyx_v_endex_ = __pyx_t_11;
+3693: return self.find_bounded_(item_size, item_ptr, start_, endex_)
__Pyx_XDECREF(__pyx_r); __pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->find_bounded_(__pyx_v_self, __pyx_v_item_size, __pyx_v_item_ptr, __pyx_v_start_, __pyx_v_endex_); if (unlikely(__pyx_t_7 == ((saddr_t)-2L))) __PYX_ERR(0, 3693, __pyx_L1_error) __pyx_t_8 = __Pyx_PyInt_From_int_fast64_t(__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3693, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
3694:
+3695: cdef saddr_t rfind_unbounded_(self, size_t size, const byte_t* buffer) except -2:
static saddr_t __pyx_f_10bytesparse_2_c_6Memory_rfind_unbounded_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; Py_ssize_t __pyx_v_offset; saddr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rfind_unbounded_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3696: cdef:
+3697: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
3698: size_t block_index
3699: const Block_* block
3700: ssize_t offset
3701:
+3702: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+3703: for block_index in range(Rack_Length(blocks), 0, -1):
for (__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) + 1; __pyx_t_3 > 0 + 1; ) { __pyx_t_3-=1; __pyx_v_block_index = __pyx_t_3;
+3704: block = Rack_Get__(blocks, block_index - 1)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, (__pyx_v_block_index - 1));
+3705: offset = Block_ReverseFind_(block, 0, SIZE_MAX, size, buffer)
__pyx_v_offset = __pyx_f_10bytesparse_2_c_Block_ReverseFind_(__pyx_v_block, 0, SIZE_MAX, __pyx_v_size, __pyx_v_buffer);
+3706: if offset >= 0:
__pyx_t_2 = ((__pyx_v_offset >= 0) != 0); if (__pyx_t_2) { /* … */ } }
+3707: return Block_Start(block) + <size_t>offset
__pyx_r = (__pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block) + ((size_t)__pyx_v_offset)); goto __pyx_L0;
+3708: return -1
__pyx_r = -1L; goto __pyx_L0;
3709:
+3710: cdef saddr_t rfind_bounded_(self, size_t size, const byte_t* buffer, addr_t start, addr_t endex) except -2:
static saddr_t __pyx_f_10bytesparse_2_c_6Memory_rfind_bounded_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_size, byte_t const *__pyx_v_buffer, addr_t __pyx_v_start, addr_t __pyx_v_endex) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; Py_ssize_t __pyx_v_offset; size_t __pyx_v_block_index_start; size_t __pyx_v_block_index_endex; size_t __pyx_v_slice_start; size_t __pyx_v_slice_endex; saddr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rfind_bounded_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.rfind_bounded_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2L; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3711: cdef:
+3712: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
3713: size_t block_index
3714: const Block_* block
3715: ssize_t offset
3716: size_t block_index_start
3717: size_t block_index_endex
3718: size_t slice_start
3719: size_t slice_endex
3720:
+3721: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+3722: if endex < start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_2) { /* … */ }
+3723: endex = start
__pyx_v_endex = __pyx_v_start;
+3724: block_index_start = Rack_IndexStart(blocks, start)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_start); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 3724, __pyx_L1_error)
__pyx_v_block_index_start = __pyx_t_3;
+3725: block_index_endex = Rack_IndexEndex(blocks, endex)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_blocks, __pyx_v_endex); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 3725, __pyx_L1_error)
__pyx_v_block_index_endex = __pyx_t_3;
3726:
+3727: for block_index in range(block_index_endex, block_index_start, -1):
__pyx_t_4 = __pyx_v_block_index_start; __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_block_index_endex + 1; __pyx_t_6 > __pyx_t_5 + 1; ) { __pyx_t_6-=1; __pyx_v_block_index = __pyx_t_6;
+3728: block = Rack_Get__(blocks, block_index - 1)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, (__pyx_v_block_index - 1));
+3729: slice_start, slice_endex = Block_BoundAddressSliceToOffset(block, start, endex)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_Block_BoundAddressSliceToOffset(__pyx_v_block, __pyx_v_start, __pyx_v_endex); __pyx_t_8 = __pyx_t_7.f0; __pyx_t_9 = __pyx_t_7.f1; __pyx_v_slice_start = __pyx_t_8; __pyx_v_slice_endex = __pyx_t_9;
+3730: offset = Block_ReverseFind_(block, slice_start, slice_endex, size, buffer)
__pyx_v_offset = __pyx_f_10bytesparse_2_c_Block_ReverseFind_(__pyx_v_block, __pyx_v_slice_start, __pyx_v_slice_endex, __pyx_v_size, __pyx_v_buffer);
+3731: if offset >= 0:
__pyx_t_2 = ((__pyx_v_offset >= 0) != 0); if (__pyx_t_2) { /* … */ } }
+3732: return Block_Start(block) + <size_t>offset
__pyx_r = (__pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block) + ((size_t)__pyx_v_offset)); goto __pyx_L0;
+3733: return -1
__pyx_r = -1L; goto __pyx_L0;
3734:
+3735: def rfind(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_37rfind(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_36rfind[] = "Memory.rfind(self: u'Memory', item: Union[AnyBytes, Value], start: Optional[Address] = None, endex: Optional[Address] = None) -> Address\nIndex of an item, reversed search.\n\n Arguments:\n item (items):\n Value to find. Can be either some byte string or an integer.\n\n start (int):\n Inclusive start of the searched range.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n Returns:\n int: The index of the last item equal to `value`, or -1.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_37rfind(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_item = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rfind (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_item,&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_36rfind(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; byte_t __pyx_v_item_value; __Pyx_memviewslice __pyx_v_item_view = { 0, 0, { 0 }, { 0 }, { 0 } }; size_t __pyx_v_item_size; byte_t const *__pyx_v_item_ptr; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rfind", 0); /* … */ /* function exit code */ __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_4, 1); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("bytesparse._c.Memory.rfind", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; 
__pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_item_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3736: self: 'Memory',
3737: item: Union[AnyBytes, Value],
+3738: start: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+3739: endex: Optional[Address] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "rfind") < 0)) __PYX_ERR(0, 3735, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_item = values[0]; __pyx_v_start = values[1]; __pyx_v_endex = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("rfind", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3735, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.rfind", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_36rfind(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), 
__pyx_v_item, __pyx_v_start, __pyx_v_endex);
3740: ) -> Address:
3741: r"""Index of an item, reversed search.
3742:
3743: Arguments:
3744: item (items):
3745: Value to find. Can be either some byte string or an integer.
3746:
3747: start (int):
3748: Inclusive start of the searched range.
3749: If ``None``, :attr:`start` is considered.
3750:
3751: endex (int):
3752: Exclusive end of the searched range.
3753: If ``None``, :attr:`endex` is considered.
3754:
3755: Returns:
3756: int: The index of the last item equal to `value`, or -1.
3757: """
3758: cdef:
3759: addr_t start_
3760: addr_t endex_
3761: byte_t item_value
3762: const byte_t[:] item_view
3763: size_t item_size
3764: const byte_t* item_ptr
3765:
+3766: if isinstance(item, int):
__pyx_t_1 = PyInt_Check(__pyx_v_item);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L3;
}
+3767: item_value = <byte_t>item
__pyx_t_3 = __Pyx_PyInt_As_byte_t(__pyx_v_item); if (unlikely((__pyx_t_3 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3767, __pyx_L1_error) __pyx_v_item_value = ((byte_t)__pyx_t_3);
+3768: item_size = 1
__pyx_v_item_size = 1;
+3769: item_ptr = &item_value
__pyx_v_item_ptr = (&__pyx_v_item_value);
3770: else:
+3771: item_view = item
/*else*/ { __pyx_t_4 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_item, 0); if (unlikely(!__pyx_t_4.memview)) __PYX_ERR(0, 3771, __pyx_L1_error) __pyx_v_item_view = __pyx_t_4; __pyx_t_4.memview = NULL; __pyx_t_4.data = NULL;
+3772: item_size = 1
__pyx_v_item_size = 1;
3773: with cython.boundscheck(False):
+3774: item_ptr = &item_view[0]
__pyx_t_5 = 0; if (__pyx_t_5 < 0) __pyx_t_5 += __pyx_v_item_view.shape[0]; __pyx_v_item_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_item_view.data + __pyx_t_5 * __pyx_v_item_view.strides[0]) )))); } __pyx_L3:;
3775:
3776: # Faster code for unbounded slice
+3777: if start is None and endex is None:
__pyx_t_1 = (__pyx_v_start == Py_None); __pyx_t_6 = (__pyx_t_1 != 0); if (__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L5_bool_binop_done; } __pyx_t_6 = (__pyx_v_endex == Py_None); __pyx_t_1 = (__pyx_t_6 != 0); __pyx_t_2 = __pyx_t_1; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+3778: return self.rfind_unbounded_(item_size, item_ptr)
__Pyx_XDECREF(__pyx_r); __pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->rfind_unbounded_(__pyx_v_self, __pyx_v_item_size, __pyx_v_item_ptr); if (unlikely(__pyx_t_7 == ((saddr_t)-2L))) __PYX_ERR(0, 3778, __pyx_L1_error) __pyx_t_8 = __Pyx_PyInt_From_int_fast64_t(__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3778, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
3779:
3780: # Bounded slice
+3781: start_, endex_ = self.bound_(start, endex)
__pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_10 = __pyx_t_9.f0; __pyx_t_11 = __pyx_t_9.f1; __pyx_v_start_ = __pyx_t_10; __pyx_v_endex_ = __pyx_t_11;
+3782: return self.rfind_bounded_(item_size, item_ptr, start_, endex_)
__Pyx_XDECREF(__pyx_r); __pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->rfind_bounded_(__pyx_v_self, __pyx_v_item_size, __pyx_v_item_ptr, __pyx_v_start_, __pyx_v_endex_); if (unlikely(__pyx_t_7 == ((saddr_t)-2L))) __PYX_ERR(0, 3782, __pyx_L1_error) __pyx_t_8 = __Pyx_PyInt_From_int_fast64_t(__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3782, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
3783:
+3784: def index(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_39index(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_38index[] = "Memory.index(self: u'Memory', item: Union[AnyBytes, Value], start: Optional[Address] = None, endex: Optional[Address] = None) -> Address\nIndex of an item.\n\n Arguments:\n item (items):\n Value to find. Can be either some byte string or an integer.\n\n start (int):\n Inclusive start of the searched range.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n Returns:\n int: The index of the first item equal to `value`.\n\n Raises:\n :obj:`ValueError`: Item not found.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_39index(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_item = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("index (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_item,&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_38index(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { PyObject *__pyx_v_offset = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("index", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Memory.index", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_offset); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); 
return __pyx_r; }
3785: self: 'Memory',
3786: item: Union[AnyBytes, Value],
+3787: start: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+3788: endex: Optional[Address] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "index") < 0)) __PYX_ERR(0, 3784, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_item = values[0]; __pyx_v_start = values[1]; __pyx_v_endex = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("index", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3784, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.index", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_38index(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), 
__pyx_v_item, __pyx_v_start, __pyx_v_endex);
3789: ) -> Address:
3790: r"""Index of an item.
3791:
3792: Arguments:
3793: item (items):
3794: Value to find. Can be either some byte string or an integer.
3795:
3796: start (int):
3797: Inclusive start of the searched range.
3798: If ``None``, :attr:`start` is considered.
3799:
3800: endex (int):
3801: Exclusive end of the searched range.
3802: If ``None``, :attr:`endex` is considered.
3803:
3804: Returns:
3805: int: The index of the first item equal to `value`.
3806:
3807: Raises:
3808: :obj:`ValueError`: Item not found.
3809: """
3810:
+3811: offset = self.find(item, start, endex)
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_find); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3811, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_4 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3811, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3811, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_5 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3811, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); if (__pyx_t_3) { __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; } __Pyx_INCREF(__pyx_v_item); __Pyx_GIVEREF(__pyx_v_item); PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_item); __Pyx_INCREF(__pyx_v_start); __Pyx_GIVEREF(__pyx_v_start); PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_start); __Pyx_INCREF(__pyx_v_endex); __Pyx_GIVEREF(__pyx_v_endex); PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_endex); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3811, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); 
__pyx_t_5 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v_offset = __pyx_t_1; __pyx_t_1 = 0;
+3812: if offset >= 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_offset, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3812, __pyx_L1_error) __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 3812, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (likely(__pyx_t_6)) { /* … */ }
+3813: return offset
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_offset); __pyx_r = __pyx_v_offset; goto __pyx_L0;
3814: else:
+3815: raise ValueError('subsection not found')
/*else*/ { __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__20, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3815, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 3815, __pyx_L1_error) } /* … */ __pyx_tuple__20 = PyTuple_Pack(1, __pyx_kp_u_subsection_not_found); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 3815, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__20); __Pyx_GIVEREF(__pyx_tuple__20);
3816:
+3817: def rindex(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_41rindex(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_40rindex[] = "Memory.rindex(self: u'Memory', item: Union[AnyBytes, Value], start: Optional[Address] = None, endex: Optional[Address] = None) -> Address\nIndex of an item, reversed search.\n\n Arguments:\n item (items):\n Value to find. Can be either some byte string or an integer.\n\n start (int):\n Inclusive start of the searched range.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n Returns:\n int: The index of the last item equal to `value`.\n\n Raises:\n :obj:`ValueError`: Item not found.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_41rindex(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_item = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rindex (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_item,&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_40rindex(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { PyObject *__pyx_v_offset = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rindex", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Memory.rindex", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_offset); __Pyx_XGIVEREF(__pyx_r); 
__Pyx_RefNannyFinishContext(); return __pyx_r; }
3818: self: 'Memory',
3819: item: Union[AnyBytes, Value],
+3820: start: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+3821: endex: Optional[Address] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "rindex") < 0)) __PYX_ERR(0, 3817, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_item = values[0]; __pyx_v_start = values[1]; __pyx_v_endex = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("rindex", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3817, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.rindex", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_40rindex(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), 
__pyx_v_item, __pyx_v_start, __pyx_v_endex);
3822: ) -> Address:
3823: r"""Index of an item, reversed search.
3824:
3825: Arguments:
3826: item (items):
3827: Value to find. Can be either some byte string or an integer.
3828:
3829: start (int):
3830: Inclusive start of the searched range.
3831: If ``None``, :attr:`start` is considered.
3832:
3833: endex (int):
3834: Exclusive end of the searched range.
3835: If ``None``, :attr:`endex` is considered.
3836:
3837: Returns:
3838: int: The index of the last item equal to `value`.
3839:
3840: Raises:
3841: :obj:`ValueError`: Item not found.
3842: """
3843:
+3844: offset = self.rfind(item, start, endex)
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_rfind); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3844, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_4 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3844, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_v_item, __pyx_v_start, __pyx_v_endex}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3844, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_5 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 3844, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); if (__pyx_t_3) { __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; } __Pyx_INCREF(__pyx_v_item); __Pyx_GIVEREF(__pyx_v_item); PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_item); __Pyx_INCREF(__pyx_v_start); __Pyx_GIVEREF(__pyx_v_start); PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_start); __Pyx_INCREF(__pyx_v_endex); __Pyx_GIVEREF(__pyx_v_endex); PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_endex); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3844, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); 
__pyx_t_5 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v_offset = __pyx_t_1; __pyx_t_1 = 0;
+3845: if offset >= 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_offset, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3845, __pyx_L1_error) __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 3845, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (likely(__pyx_t_6)) { /* … */ }
+3846: return offset
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_offset); __pyx_r = __pyx_v_offset; goto __pyx_L0;
3847: else:
+3848: raise ValueError('subsection not found')
/*else*/ { __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__20, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3848, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 3848, __pyx_L1_error) }
3849:
+3850: def __contains__(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_43__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_42__contains__[] = "Checks if some items are contained.\n\n Arguments:\n item (items):\n Items to find. Can be either some byte string or an integer.\n\n Returns:\n bool: Item is contained.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| |[1 | 2 | 3]| |[x | y | z]|\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'123'], [9, b'xyz']])\n >>> b'23' in memory\n True\n >>> ord('y') in memory\n True\n >>> b'$' in memory\n False\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_42__contains__; #endif static int __pyx_pw_10bytesparse_2_c_6Memory_43__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_42__contains__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_42__contains__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__contains__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3851: self: 'Memory',
3852: item: Union[AnyBytes, Value],
3853: ) -> bool:
3854: r"""Checks if some items are contained.
3855:
3856: Arguments:
3857: item (items):
3858: Items to find. Can be either some byte string or an integer.
3859:
3860: Returns:
3861: bool: Item is contained.
3862:
3863: Example:
3864: +---+---+---+---+---+---+---+---+---+---+---+---+
3865: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
3866: +===+===+===+===+===+===+===+===+===+===+===+===+
3867: | |[A | B | C]| |[1 | 2 | 3]| |[x | y | z]|
3868: +---+---+---+---+---+---+---+---+---+---+---+---+
3869:
3870: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'123'], [9, b'xyz']])
3871: >>> b'23' in memory
3872: True
3873: >>> ord('y') in memory
3874: True
3875: >>> b'$' in memory
3876: False
3877: """
3878:
+3879: return self.find(item) >= 0
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_find); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3879, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_item) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3879, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = PyObject_RichCompare(__pyx_t_1, __pyx_int_0, Py_GE); __Pyx_XGOTREF(__pyx_t_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3879, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 3879, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_4; goto __pyx_L0;
3880:
+3881: cdef addr_t count_unbounded_(self, size_t size, const byte_t* buffer) except -1:
static addr_t __pyx_f_10bytesparse_2_c_6Memory_count_unbounded_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_size, byte_t const *__pyx_v_buffer) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; addr_t __pyx_v_count; addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("count_unbounded_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3882: cdef:
+3883: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
3884: size_t block_index
3885: const Block_* block
+3886: addr_t count = 0
__pyx_v_count = 0;
3887:
+3888: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+3889: for block_index in range(Rack_Length(blocks)):
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_block_index = __pyx_t_5;
+3890: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+3891: count += Block_Count_(block, 0, SIZE_MAX, size, buffer)
__pyx_v_count = (__pyx_v_count + __pyx_f_10bytesparse_2_c_Block_Count_(__pyx_v_block, 0, SIZE_MAX, __pyx_v_size, __pyx_v_buffer)); }
+3892: return count
__pyx_r = __pyx_v_count; goto __pyx_L0;
3893:
+3894: cdef addr_t count_bounded_(self, size_t size, const byte_t* buffer, addr_t start, addr_t endex) except -1:
static addr_t __pyx_f_10bytesparse_2_c_6Memory_count_bounded_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_size, byte_t const *__pyx_v_buffer, addr_t __pyx_v_start, addr_t __pyx_v_endex) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; addr_t __pyx_v_count; size_t __pyx_v_block_index_start; size_t __pyx_v_block_index_endex; size_t __pyx_v_slice_start; size_t __pyx_v_slice_endex; addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("count_bounded_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.count_bounded_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1LL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
3895: cdef:
+3896: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
3897: size_t block_index
3898: const Block_* block
+3899: addr_t count = 0
__pyx_v_count = 0;
3900: size_t block_index_start
3901: size_t block_index_endex
3902: addr_t block_start
3903: addr_t block_endex
3904: size_t slice_start
3905: size_t slice_endex
3906:
+3907: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+3908: if endex < start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_2) { /* … */ }
+3909: endex = start
__pyx_v_endex = __pyx_v_start;
+3910: block_index_start = Rack_IndexStart(blocks, start)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_start); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 3910, __pyx_L1_error)
__pyx_v_block_index_start = __pyx_t_3;
+3911: block_index_endex = Rack_IndexEndex(blocks, endex)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_blocks, __pyx_v_endex); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 3911, __pyx_L1_error)
__pyx_v_block_index_endex = __pyx_t_3;
3912:
+3913: for block_index in range(block_index_start, block_index_endex):
__pyx_t_4 = __pyx_v_block_index_endex; __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_block_index_start; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_block_index = __pyx_t_6;
+3914: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+3915: slice_start, slice_endex = Block_BoundAddressSliceToOffset(block, start, endex)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_Block_BoundAddressSliceToOffset(__pyx_v_block, __pyx_v_start, __pyx_v_endex); __pyx_t_8 = __pyx_t_7.f0; __pyx_t_9 = __pyx_t_7.f1; __pyx_v_slice_start = __pyx_t_8; __pyx_v_slice_endex = __pyx_t_9;
+3916: count += Block_Count_(block, slice_start, slice_endex, size, buffer)
__pyx_v_count = (__pyx_v_count + __pyx_f_10bytesparse_2_c_Block_Count_(__pyx_v_block, __pyx_v_slice_start, __pyx_v_slice_endex, __pyx_v_size, __pyx_v_buffer)); }
+3917: return count
__pyx_r = __pyx_v_count; goto __pyx_L0;
3918:
+3919: def count(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_45count(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_44count[] = "Memory.count(self: u'Memory', item: Union[AnyBytes, Value], start: Optional[Address] = None, endex: Optional[Address] = None) -> int\nCounts items.\n\n Arguments:\n item (items):\n Reference value to count.\n\n start (int):\n Inclusive start of the searched range.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end of the searched range.\n If ``None``, :attr:`endex` is considered.\n\n Returns:\n int: The number of items equal to `value`.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| |[B | a | t]| |[t | a | b]|\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'Bat'], [9, b'tab']])\n >>> memory.count(b'a')\n 2\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_45count(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_item = 0; PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("count (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_item,&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_44count(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; byte_t __pyx_v_item_value; __Pyx_memviewslice __pyx_v_item_view = { 0, 0, { 0 }, { 0 }, { 0 } }; size_t __pyx_v_item_size; byte_t const *__pyx_v_item_ptr; PyObject *__pyx_r = 
NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("count", 0); /* … */ /* function exit code */ __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_4, 1); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("bytesparse._c.Memory.count", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_item_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3920: self: 'Memory',
3921: item: Union[AnyBytes, Value],
+3922: start: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+3923: endex: Optional[Address] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "count") < 0)) __PYX_ERR(0, 3919, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_item = values[0]; __pyx_v_start = values[1]; __pyx_v_endex = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("count", 0, 1, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 3919, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.count", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_44count(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), 
__pyx_v_item, __pyx_v_start, __pyx_v_endex);
3924: ) -> int:
3925: r"""Counts items.
3926:
3927: Arguments:
3928: item (items):
3929: Reference value to count.
3930:
3931: start (int):
3932: Inclusive start of the searched range.
3933: If ``None``, :attr:`start` is considered.
3934:
3935: endex (int):
3936: Exclusive end of the searched range.
3937: If ``None``, :attr:`endex` is considered.
3938:
3939: Returns:
3940: int: The number of items equal to `value`.
3941:
3942: Example:
3943: +---+---+---+---+---+---+---+---+---+---+---+---+
3944: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
3945: +===+===+===+===+===+===+===+===+===+===+===+===+
3946: | |[A | B | C]| |[B | a | t]| |[t | a | b]|
3947: +---+---+---+---+---+---+---+---+---+---+---+---+
3948:
3949: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'Bat'], [9, b'tab']])
3950: >>> memory.count(b'a')
3951: 2
3952: """
3953: cdef:
3954: addr_t start_
3955: addr_t endex_
3956: byte_t item_value
3957: const byte_t[:] item_view
3958: size_t item_size
3959: const byte_t* item_ptr
3960:
+3961: if isinstance(item, int):
__pyx_t_1 = PyInt_Check(__pyx_v_item);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L3;
}
+3962: item_value = <byte_t>item
__pyx_t_3 = __Pyx_PyInt_As_byte_t(__pyx_v_item); if (unlikely((__pyx_t_3 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 3962, __pyx_L1_error) __pyx_v_item_value = ((byte_t)__pyx_t_3);
+3963: item_size = 1
__pyx_v_item_size = 1;
+3964: item_ptr = &item_value
__pyx_v_item_ptr = (&__pyx_v_item_value);
3965: else:
+3966: item_view = item
/*else*/ { __pyx_t_4 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_item, 0); if (unlikely(!__pyx_t_4.memview)) __PYX_ERR(0, 3966, __pyx_L1_error) __pyx_v_item_view = __pyx_t_4; __pyx_t_4.memview = NULL; __pyx_t_4.data = NULL;
+3967: item_size = 1
__pyx_v_item_size = 1;
3968: with cython.boundscheck(False):
+3969: item_ptr = &item_view[0]
__pyx_t_5 = 0; if (__pyx_t_5 < 0) __pyx_t_5 += __pyx_v_item_view.shape[0]; __pyx_v_item_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_item_view.data + __pyx_t_5 * __pyx_v_item_view.strides[0]) )))); } __pyx_L3:;
3970:
3971: # Faster code for unbounded slice
+3972: if start is None and endex is None:
__pyx_t_1 = (__pyx_v_start == Py_None); __pyx_t_6 = (__pyx_t_1 != 0); if (__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L5_bool_binop_done; } __pyx_t_6 = (__pyx_v_endex == Py_None); __pyx_t_1 = (__pyx_t_6 != 0); __pyx_t_2 = __pyx_t_1; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+3973: return self.count_unbounded_(item_size, item_ptr)
__Pyx_XDECREF(__pyx_r); __pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->count_unbounded_(__pyx_v_self, __pyx_v_item_size, __pyx_v_item_ptr); if (unlikely(__pyx_t_7 == ((addr_t)-1LL))) __PYX_ERR(0, 3973, __pyx_L1_error) __pyx_t_8 = __Pyx_PyInt_From_uint_fast64_t(__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3973, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
3974:
3975: # Bounded slice
+3976: start_, endex_ = self.bound_(start, endex)
__pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_7 = __pyx_t_9.f0; __pyx_t_10 = __pyx_t_9.f1; __pyx_v_start_ = __pyx_t_7; __pyx_v_endex_ = __pyx_t_10;
+3977: return self.count_bounded_(item_size, item_ptr, start_, endex_)
__Pyx_XDECREF(__pyx_r); __pyx_t_10 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->count_bounded_(__pyx_v_self, __pyx_v_item_size, __pyx_v_item_ptr, __pyx_v_start_, __pyx_v_endex_); if (unlikely(__pyx_t_10 == ((addr_t)-1LL))) __PYX_ERR(0, 3977, __pyx_L1_error) __pyx_t_8 = __Pyx_PyInt_From_uint_fast64_t(__pyx_t_10); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 3977, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
3978:
+3979: def __getitem__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_47__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_46__getitem__[] = "Gets data.\n\n Arguments:\n key (slice or int):\n Selection range or address.\n If it is a :obj:`slice` with bytes-like `step`, the latter is\n interpreted as the filling pattern.\n\n Returns:\n items: Items from the requested range.\n\n Note:\n This method is not optimized for a :class:`slice` where its `step`\n is an integer greater than 1.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|\n +===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]|\n +---+---+---+---+---+---+---+---+---+---+---+\n | | 65| 66| 67| 68| | 36| |120|121|122|\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> memory[9] # -> ord('y') = 121\n 121\n >>> memory[:3]._blocks\n [[1, b'AB']]\n >>> memory[3:10]._blocks\n [[3, b'CD'], [6, b'$'], [8, b'xy']]\n >>> bytes(memory[3:10:b'.'])\n b'CD.$.xy'\n >>> memory[memory.endex]\n None\n >>> bytes(memory[3:10:3])\n b'C$y'\n >>> memory[3:10:2]._blocks\n [[3, b'C'], [6, b'y']]\n >>> bytes(memory[3:10:2])\n Traceback (most recent call last):\n ...\n ValueError: non-contiguous data within range\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_46__getitem__; #endif static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_47__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_46__getitem__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_key)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject 
*__pyx_pf_10bytesparse_2_c_6Memory_46__getitem__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_key) { PyObject *__pyx_v_key_ = 0; addr_t __pyx_v_start; addr_t __pyx_v_endex; Block_ *__pyx_v_pattern; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory = 0; int __pyx_v_value; PyObject *__pyx_v_key_start = NULL; PyObject *__pyx_v_key_endex = NULL; PyObject *__pyx_v_key_step = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getitem__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_18); __Pyx_AddTraceback("bytesparse._c.Memory.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_key_); __Pyx_XDECREF((PyObject *)__pyx_v_memory); __Pyx_XDECREF(__pyx_v_key_start); __Pyx_XDECREF(__pyx_v_key_endex); __Pyx_XDECREF(__pyx_v_key_step); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
3980: self: 'Memory',
3981: key: Union[Address, slice],
3982: ) -> Any:
3983: r"""Gets data.
3984:
3985: Arguments:
3986: key (slice or int):
3987: Selection range or address.
3988: If it is a :obj:`slice` with bytes-like `step`, the latter is
3989: interpreted as the filling pattern.
3990:
3991: Returns:
3992: items: Items from the requested range.
3993:
3994: Note:
3995: This method is not optimized for a :class:`slice` where its `step`
3996: is an integer greater than 1.
3997:
3998: Example:
3999: +---+---+---+---+---+---+---+---+---+---+---+
4000: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|
4001: +===+===+===+===+===+===+===+===+===+===+===+
4002: | |[A | B | C | D]| |[$]| |[x | y | z]|
4003: +---+---+---+---+---+---+---+---+---+---+---+
4004: | | 65| 66| 67| 68| | 36| |120|121|122|
4005: +---+---+---+---+---+---+---+---+---+---+---+
4006:
4007: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
4008: >>> memory[9] # -> ord('y') = 121
4009: 121
4010: >>> memory[:3]._blocks
4011: [[1, b'AB']]
4012: >>> memory[3:10]._blocks
4013: [[3, b'CD'], [6, b'$'], [8, b'xy']]
4014: >>> bytes(memory[3:10:b'.'])
4015: b'CD.$.xy'
4016: >>> memory[memory.endex]
4017: None
4018: >>> bytes(memory[3:10:3])
4019: b'C$y'
4020: >>> memory[3:10:2]._blocks
4021: [[3, b'C'], [6, b'y']]
4022: >>> bytes(memory[3:10:2])
4023: Traceback (most recent call last):
4024: ...
4025: ValueError: non-contiguous data within range
4026: """
4027: cdef:
4028: slice key_
4029: addr_t start
4030: addr_t endex
+4031: Block_* pattern = NULL
__pyx_v_pattern = NULL;
4032: Memory memory
4033: int value
4034:
+4035: if isinstance(key, slice):
__pyx_t_1 = PySlice_Check(__pyx_v_key);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
}
+4036: key_ = <slice>key
__pyx_t_3 = __pyx_v_key;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_ = ((PyObject*)__pyx_t_3);
__pyx_t_3 = 0;
+4037: key_start = key_.start
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->start;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_start = __pyx_t_3;
__pyx_t_3 = 0;
+4038: key_endex = key_.stop
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->stop;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_endex = __pyx_t_3;
__pyx_t_3 = 0;
+4039: start = self.start_() if key_start is None else <addr_t>key_start
__pyx_t_2 = (__pyx_v_key_start == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self); } else { __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_start); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4039, __pyx_L1_error) __pyx_t_4 = ((addr_t)__pyx_t_5); } __pyx_v_start = __pyx_t_4;
+4040: endex = self.endex_() if key_endex is None else <addr_t>key_endex
__pyx_t_2 = (__pyx_v_key_endex == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self); } else { __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_endex); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4040, __pyx_L1_error) __pyx_t_4 = ((addr_t)__pyx_t_5); } __pyx_v_endex = __pyx_t_4;
+4041: key_step = key_.step
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->step;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_step = __pyx_t_3;
__pyx_t_3 = 0;
4042:
+4043: if key_step is None or key_step is 1 or key_step == 1:
__pyx_t_1 = (__pyx_v_key_step == Py_None); __pyx_t_6 = (__pyx_t_1 != 0); if (!__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L5_bool_binop_done; } __pyx_t_6 = (__pyx_v_key_step == __pyx_int_1); __pyx_t_1 = (__pyx_t_6 != 0); if (!__pyx_t_1) { } else { __pyx_t_2 = __pyx_t_1; goto __pyx_L5_bool_binop_done; } __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_key_step, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4043, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 4043, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_2 = __pyx_t_1; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+4044: return self.extract_(start, endex, 0, NULL, 1, True)
__Pyx_XDECREF(__pyx_r); __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4044, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
4045:
+4046: elif isinstance(key_step, int):
__pyx_t_2 = PyInt_Check(__pyx_v_key_step);
__pyx_t_1 = (__pyx_t_2 != 0);
if (__pyx_t_1) {
/* … */
}
+4047: if key_step > 1:
__pyx_t_3 = PyObject_RichCompare(__pyx_v_key_step, __pyx_int_1, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4047, __pyx_L1_error) __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 4047, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ }
+4048: return self.extract_(start, endex, 0, NULL, <saddr_t>key_step, True)
__Pyx_XDECREF(__pyx_r); __pyx_t_7 = __Pyx_PyInt_As_int_fast64_t(__pyx_v_key_step); if (unlikely((__pyx_t_7 == ((saddr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4048, __pyx_L1_error) __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, NULL, ((saddr_t)__pyx_t_7), 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4048, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
4049: else:
+4050: return Memory() # empty
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10bytesparse_2_c_Memory)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4050, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; }
4051:
4052: else:
+4053: pattern = Block_FromObject(0, key_step, True)
/*else*/ {
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_FromObject(0, __pyx_v_key_step, 1); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 4053, __pyx_L1_error)
__pyx_v_pattern = __pyx_t_8;
+4054: try:
/*try:*/ {
+4055: memory = self.extract_(start, endex, Block_Length(pattern), Block_At__(pattern, 0), 1, True)
__pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_pattern), __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_pattern, 0), 1, 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4055, __pyx_L10_error) __Pyx_GOTREF(__pyx_t_3); __pyx_v_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_3); __pyx_t_3 = 0; }
4056: finally:
+4057: Block_Free(pattern) # orphan
/*finally:*/ { /*normal exit:*/{ (void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_pattern)); goto __pyx_L11; } __pyx_L10_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14) < 0)) __Pyx_ErrFetch(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14); __Pyx_XGOTREF(__pyx_t_12); __Pyx_XGOTREF(__pyx_t_13); __Pyx_XGOTREF(__pyx_t_14); __Pyx_XGOTREF(__pyx_t_15); __Pyx_XGOTREF(__pyx_t_16); __Pyx_XGOTREF(__pyx_t_17); __pyx_t_9 = __pyx_lineno; __pyx_t_10 = __pyx_clineno; __pyx_t_11 = __pyx_filename; { (void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_pattern)); } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_15); __Pyx_XGIVEREF(__pyx_t_16); __Pyx_XGIVEREF(__pyx_t_17); __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); } __Pyx_XGIVEREF(__pyx_t_12); __Pyx_XGIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_ErrRestore(__pyx_t_12, __pyx_t_13, __pyx_t_14); __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_lineno = __pyx_t_9; __pyx_clineno = __pyx_t_10; __pyx_filename = __pyx_t_11; goto __pyx_L1_error; } __pyx_L11:; }
+4058: return memory
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(((PyObject *)__pyx_v_memory)); __pyx_r = ((PyObject *)__pyx_v_memory); goto __pyx_L0; }
4059: else:
+4060: value = self.peek_(<addr_t>key)
/*else*/ { __pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4060, __pyx_L1_error) __pyx_t_10 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->peek_(__pyx_v_self, ((addr_t)__pyx_t_4)); if (unlikely(__pyx_t_10 == ((int)-2))) __PYX_ERR(0, 4060, __pyx_L1_error) __pyx_v_value = __pyx_t_10;
+4061: return None if value < 0 else value
__Pyx_XDECREF(__pyx_r); if (((__pyx_v_value < 0) != 0)) { __Pyx_INCREF(Py_None); __pyx_t_3 = Py_None; } else { __pyx_t_18 = __Pyx_PyInt_From_int(__pyx_v_value); if (unlikely(!__pyx_t_18)) __PYX_ERR(0, 4061, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_18); __pyx_t_3 = __pyx_t_18; __pyx_t_18 = 0; } __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; }
4062:
+4063: def __setitem__(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_49__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_48__setitem__[] = "Sets data.\n\n Arguments:\n key (slice or int):\n Selection range or address.\n\n value (items):\n Items to write at the selection address.\n If `value` is null, the range is cleared.\n\n Note:\n This method is not optimized for a :class:`slice` where its `step`\n is an integer greater than 1.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+\n | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|\n +===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+\n | |[A]| | | | |[y | z]| |\n +---+---+---+---+---+---+---+---+---+\n | |[A | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+\n | |[A]| |[C]| | | y | z]| |\n +---+---+---+---+---+---+---+---+---+\n | |[A | 1 | C]| |[2 | y | z]| |\n +---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])\n >>> memory[7:10] = None\n >>> memory._blocks\n [[5, b'AB'], [10, b'yz']]\n >>> memory[7] = b'C'\n >>> memory[9] = b'x'\n >>> memory._blocks == [[5, b'ABC'], [9, b'xyz']]\n True\n >>> memory[6:12:3] = None\n >>> memory._blocks\n [[5, b'A'], [7, b'C'], [10, b'yz']]\n >>> memory[6:13:3] = b'123'\n >>> memory._blocks\n [[5, b'A1C'], [9, b'2yz3']]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | | | | | |[A | B | C]| |[x | y | z]|\n +---+---+---+---+---+---+---+---+---+---+---+---+\n |[$]| |""[A | B | C]| |[x | y | z]| | | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n |[$]| |[A | B | 4 | 5 | 6 | 7 | 8 | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n |[$]| |[A | B | 4 | 5 | < | > | 8 | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = 
Memory(blocks=[[5, b'ABC'], [9, b'xyz']])\n >>> memory[0:4] = b'$'\n >>> memory._blocks\n [[0, b'$'], [2, b'ABC'], [6, b'xyz']]\n >>> memory[4:7] = b'45678'\n >>> memory._blocks\n [[0, b'$'], [2, b'AB45678yz']]\n >>> memory[6:8] = b'<>'\n >>> memory._blocks\n [[0, b'$'], [2, b'AB45<>8yz']]\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_48__setitem__; #endif static int __pyx_pw_10bytesparse_2_c_6Memory_49__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_48__setitem__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_key), ((PyObject *)__pyx_v_value)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_48__setitem__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { PyObject *__pyx_v_key_ = 0; addr_t __pyx_v_start; addr_t __pyx_v_endex; addr_t __pyx_v_step; addr_t __pyx_v_address; addr_t __pyx_v_slice_size; Block_ *__pyx_v_value_; size_t __pyx_v_value_size; addr_t __pyx_v_del_start; addr_t __pyx_v_del_endex; size_t __pyx_v_offset; PyObject *__pyx_v_key_start = NULL; PyObject *__pyx_v_key_endex = NULL; PyObject *__pyx_v_key_step = NULL; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__setitem__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Memory.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_key_); __Pyx_XDECREF(__pyx_v_key_start); __Pyx_XDECREF(__pyx_v_key_endex); __Pyx_XDECREF(__pyx_v_key_step); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4064: self: 'Memory',
4065: key: Union[Address, slice],
4066: value: Optional[Union[AnyBytes, Value]],
4067: ) -> None:
4068: r"""Sets data.
4069:
4070: Arguments:
4071: key (slice or int):
4072: Selection range or address.
4073:
4074: value (items):
4075: Items to write at the selection address.
4076: If `value` is null, the range is cleared.
4077:
4078: Note:
4079: This method is not optimized for a :class:`slice` where its `step`
4080: is an integer greater than 1.
4081:
4082: Examples:
4083: +---+---+---+---+---+---+---+---+---+
4084: | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|
4085: +===+===+===+===+===+===+===+===+===+
4086: | |[A | B | C]| |[x | y | z]| |
4087: +---+---+---+---+---+---+---+---+---+
4088: | |[A]| | | | |[y | z]| |
4089: +---+---+---+---+---+---+---+---+---+
4090: | |[A | B | C]| |[x | y | z]| |
4091: +---+---+---+---+---+---+---+---+---+
4092: | |[A]| |[C]| | | y | z]| |
4093: +---+---+---+---+---+---+---+---+---+
4094: | |[A | 1 | C]| |[2 | y | z]| |
4095: +---+---+---+---+---+---+---+---+---+
4096:
4097: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])
4098: >>> memory[7:10] = None
4099: >>> memory._blocks
4100: [[5, b'AB'], [10, b'yz']]
4101: >>> memory[7] = b'C'
4102: >>> memory[9] = b'x'
4103: >>> memory._blocks == [[5, b'ABC'], [9, b'xyz']]
4104: True
4105: >>> memory[6:12:3] = None
4106: >>> memory._blocks
4107: [[5, b'A'], [7, b'C'], [10, b'yz']]
4108: >>> memory[6:13:3] = b'123'
4109: >>> memory._blocks
4110: [[5, b'A1C'], [9, b'2yz3']]
4111:
4112: ~~~
4113:
4114: +---+---+---+---+---+---+---+---+---+---+---+---+
4115: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
4116: +===+===+===+===+===+===+===+===+===+===+===+===+
4117: | | | | | |[A | B | C]| |[x | y | z]|
4118: +---+---+---+---+---+---+---+---+---+---+---+---+
4119: |[$]| |[A | B | C]| |[x | y | z]| | | |
4120: +---+---+---+---+---+---+---+---+---+---+---+---+
4121: |[$]| |[A | B | 4 | 5 | 6 | 7 | 8 | y | z]| |
4122: +---+---+---+---+---+---+---+---+---+---+---+---+
4123: |[$]| |[A | B | 4 | 5 | < | > | 8 | y | z]| |
4124: +---+---+---+---+---+---+---+---+---+---+---+---+
4125:
4126: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])
4127: >>> memory[0:4] = b'$'
4128: >>> memory._blocks
4129: [[0, b'$'], [2, b'ABC'], [6, b'xyz']]
4130: >>> memory[4:7] = b'45678'
4131: >>> memory._blocks
4132: [[0, b'$'], [2, b'AB45678yz']]
4133: >>> memory[6:8] = b'<>'
4134: >>> memory._blocks
4135: [[0, b'$'], [2, b'AB45<>8yz']]
4136: """
4137: cdef:
4138: slice key_
4139: addr_t start
4140: addr_t endex
+4141: addr_t step = 0 # indefinite
__pyx_v_step = 0;
4142: addr_t address
4143: addr_t slice_size
+4144: Block_* value_ = NULL
__pyx_v_value_ = NULL;
4145: size_t value_size
4146: addr_t del_start
4147: addr_t del_endex
4148: size_t offset
4149:
+4150: if isinstance(key, slice):
__pyx_t_1 = PySlice_Check(__pyx_v_key);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L3;
}
+4151: key_ = <slice>key
__pyx_t_3 = __pyx_v_key;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_ = ((PyObject*)__pyx_t_3);
__pyx_t_3 = 0;
+4152: key_start = key_.start
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->start;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_start = __pyx_t_3;
__pyx_t_3 = 0;
+4153: key_endex = key_.stop
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->stop;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_endex = __pyx_t_3;
__pyx_t_3 = 0;
+4154: start = self.start_() if key_start is None else <addr_t>key_start
__pyx_t_2 = (__pyx_v_key_start == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self); } else { __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_start); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4154, __pyx_L1_error) __pyx_t_4 = ((addr_t)__pyx_t_5); } __pyx_v_start = __pyx_t_4;
+4155: endex = self.endex_() if key_endex is None else <addr_t>key_endex
__pyx_t_2 = (__pyx_v_key_endex == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self); } else { __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_endex); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4155, __pyx_L1_error) __pyx_t_4 = ((addr_t)__pyx_t_5); } __pyx_v_endex = __pyx_t_4;
+4156: if endex < start:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_start) != 0); if (__pyx_t_2) { /* … */ }
+4157: endex = start
__pyx_v_endex = __pyx_v_start;
4158:
+4159: key_step = key_.step
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->step;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_step = __pyx_t_3;
__pyx_t_3 = 0;
+4160: if isinstance(key_step, int):
__pyx_t_2 = PyInt_Check(__pyx_v_key_step);
__pyx_t_1 = (__pyx_t_2 != 0);
if (__pyx_t_1) {
/* … */
}
+4161: if key_step is None or key_step is 1 or key_step == 1:
__pyx_t_2 = (__pyx_v_key_step == Py_None); __pyx_t_6 = (__pyx_t_2 != 0); if (!__pyx_t_6) { } else { __pyx_t_1 = __pyx_t_6; goto __pyx_L7_bool_binop_done; } __pyx_t_6 = (__pyx_v_key_step == __pyx_int_1); __pyx_t_2 = (__pyx_t_6 != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L7_bool_binop_done; } __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_key_step, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4161, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 4161, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_1 = __pyx_t_2; __pyx_L7_bool_binop_done:; if (__pyx_t_1) { goto __pyx_L6; }
4162: pass
+4163: elif key_step > 1:
__pyx_t_3 = PyObject_RichCompare(__pyx_v_key_step, __pyx_int_1, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4163, __pyx_L1_error) __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 4163, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_1) { /* … */ goto __pyx_L6; }
+4164: step = <addr_t>key_step
__pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_step); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4164, __pyx_L1_error) __pyx_v_step = ((addr_t)__pyx_t_4);
4165: else:
+4166: return # empty range
/*else*/ { __pyx_r = 0; goto __pyx_L0; } __pyx_L6:;
4167:
+4168: if value is None:
__pyx_t_1 = (__pyx_v_value == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ }
4169: # Clear range
+4170: if not step:
__pyx_t_2 = ((!(__pyx_v_step != 0)) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L11; }
+4171: self._erase_(start, endex, False, False) # clear
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, 0); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 4171, __pyx_L1_error)
4172: else:
+4173: address = start
/*else*/ { __pyx_v_address = __pyx_v_start;
+4174: while address < endex:
while (1) { __pyx_t_2 = ((__pyx_v_address < __pyx_v_endex) != 0); if (!__pyx_t_2) break;
+4175: self._erase_(address, address + 1, False, False) # clear
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_address, (__pyx_v_address + 1), 0, 0); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 4175, __pyx_L1_error)
+4176: if CannotAddAddrU(address, step):
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_CannotAddAddrU(__pyx_v_address, __pyx_v_step) != 0); if (__pyx_t_2) { /* … */ }
+4177: break
goto __pyx_L13_break;
+4178: address += step
__pyx_v_address = (__pyx_v_address + __pyx_v_step); } __pyx_L13_break:; } __pyx_L11:;
+4179: return # nothing to write
__pyx_r = 0; goto __pyx_L0;
4180:
+4181: slice_size = endex - start
__pyx_v_slice_size = (__pyx_v_endex - __pyx_v_start);
+4182: if step:
__pyx_t_2 = (__pyx_v_step != 0); if (__pyx_t_2) { /* … */ }
4183: with cython.cdivision(True):
+4184: slice_size = (slice_size + step - 1) // step
__pyx_v_slice_size = (((__pyx_v_slice_size + __pyx_v_step) - 1) / __pyx_v_step);
+4185: CheckAddrToSizeU(slice_size)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_slice_size); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4185, __pyx_L1_error)
4186:
+4187: value_ = Block_FromObject(0, value, False)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_FromObject(0, __pyx_v_value, 0); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 4187, __pyx_L1_error)
__pyx_v_value_ = __pyx_t_8;
+4188: try:
/*try:*/ {
+4189: if isinstance(value, int):
__pyx_t_2 = PyInt_Check(__pyx_v_value);
__pyx_t_1 = (__pyx_t_2 != 0);
if (__pyx_t_1) {
/* … */
}
+4190: value_ = Block_Repeat(value_, <size_t>slice_size)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_Repeat(__pyx_v_value_, ((size_t)__pyx_v_slice_size)); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 4190, __pyx_L17_error)
__pyx_v_value_ = __pyx_t_8;
+4191: value_size = Block_Length(value_)
__pyx_v_value_size = __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_value_);
4192:
+4193: if value_size < slice_size:
__pyx_t_1 = ((__pyx_v_value_size < __pyx_v_slice_size) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L20; }
4194: # Shrink: remove excess, overwrite existing
+4195: if not step or not value_size:
__pyx_t_2 = ((!(__pyx_v_step != 0)) != 0); if (!__pyx_t_2) { } else { __pyx_t_1 = __pyx_t_2; goto __pyx_L22_bool_binop_done; } __pyx_t_2 = ((!(__pyx_v_value_size != 0)) != 0); __pyx_t_1 = __pyx_t_2; __pyx_L22_bool_binop_done:; if (likely(__pyx_t_1)) { /* … */ goto __pyx_L21; }
+4196: if CannotAddAddrU(start, value_size):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddAddrU(__pyx_v_start, __pyx_v_value_size) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L24; }
+4197: del_start = ADDR_MAX
__pyx_v_del_start = ADDR_MAX;
4198: else:
+4199: del_start = start + value_size
/*else*/ { __pyx_v_del_start = (__pyx_v_start + __pyx_v_value_size); } __pyx_L24:;
+4200: if CannotAddAddrU(del_start, (slice_size - value_size)):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddAddrU(__pyx_v_del_start, (__pyx_v_slice_size - __pyx_v_value_size)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L25; }
+4201: del_endex = ADDR_MAX
__pyx_v_del_endex = ADDR_MAX;
4202: else:
+4203: del_endex = del_start + (slice_size - value_size)
/*else*/ { __pyx_v_del_endex = (__pyx_v_del_start + (__pyx_v_slice_size - __pyx_v_value_size)); } __pyx_L25:;
+4204: self._erase_(del_start, del_endex, True, True) # delete
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_del_start, __pyx_v_del_endex, 1, 1); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 4204, __pyx_L17_error)
+4205: if value_size:
__pyx_t_1 = (__pyx_v_value_size != 0); if (__pyx_t_1) { /* … */ }
+4206: self.write_raw_(start, value_size, Block_At__(value_, 0), None)
__pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_raw_(__pyx_v_self, __pyx_v_start, __pyx_v_value_size, __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_value_, 0), ((PyObject*)Py_None)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4206, __pyx_L17_error)
4207: else:
+4208: raise ValueError(f'attempt to assign bytes of size {value_size}'
/*else*/ { __pyx_t_3 = PyTuple_New(4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4208, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_9 = 0; __pyx_t_10 = 127; __Pyx_INCREF(__pyx_kp_u_attempt_to_assign_bytes_of_size); __pyx_t_9 += 32; __Pyx_GIVEREF(__pyx_kp_u_attempt_to_assign_bytes_of_size); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_kp_u_attempt_to_assign_bytes_of_size); __pyx_t_11 = __Pyx_PyUnicode_From_size_t(__pyx_v_value_size, 0, ' ', 'd'); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 4208, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_11); __pyx_t_9 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_11); __pyx_t_11 = 0; __Pyx_INCREF(__pyx_kp_u_to_extended_slice_of_size); __pyx_t_9 += 27; __Pyx_GIVEREF(__pyx_kp_u_to_extended_slice_of_size); PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_kp_u_to_extended_slice_of_size); /* … */ __pyx_t_12 = __Pyx_PyUnicode_Join(__pyx_t_3, 4, __pyx_t_9, __pyx_t_10); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 4208, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_12); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4208, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 4208, __pyx_L17_error) } __pyx_L21:;
+4209: f' to extended slice of size {slice_size}')
__pyx_t_11 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_slice_size); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 4209, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_11); __pyx_t_12 = __Pyx_PyObject_FormatSimple(__pyx_t_11, __pyx_empty_unicode); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 4209, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; __pyx_t_10 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_12) > __pyx_t_10) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_12) : __pyx_t_10; __pyx_t_9 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_12); PyTuple_SET_ITEM(__pyx_t_3, 3, __pyx_t_12); __pyx_t_12 = 0;
+4210: elif slice_size < value_size:
__pyx_t_1 = ((__pyx_v_slice_size < __pyx_v_value_size) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L20; }
4211: # Enlarge: insert excess, overwrite existing
+4212: if not step:
__pyx_t_1 = ((!(__pyx_v_step != 0)) != 0); if (likely(__pyx_t_1)) { /* … */ goto __pyx_L27; }
+4213: self.insert_raw_(endex, value_size - slice_size, Block_At__(value_, slice_size), None)
__pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->insert_raw_(__pyx_v_self, __pyx_v_endex, (__pyx_v_value_size - __pyx_v_slice_size), __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_value_, __pyx_v_slice_size), ((PyObject*)Py_None)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4213, __pyx_L17_error)
+4214: self.write_raw_(start, slice_size, Block_At__(value_, 0), None)
__pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_raw_(__pyx_v_self, __pyx_v_start, __pyx_v_slice_size, __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_value_, 0), ((PyObject*)Py_None)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4214, __pyx_L17_error)
4215: else:
+4216: raise ValueError(f'attempt to assign bytes of size {value_size}'
/*else*/ { __pyx_t_3 = PyTuple_New(4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4216, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_9 = 0; __pyx_t_10 = 127; __Pyx_INCREF(__pyx_kp_u_attempt_to_assign_bytes_of_size); __pyx_t_9 += 32; __Pyx_GIVEREF(__pyx_kp_u_attempt_to_assign_bytes_of_size); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_kp_u_attempt_to_assign_bytes_of_size); __pyx_t_12 = __Pyx_PyUnicode_From_size_t(__pyx_v_value_size, 0, ' ', 'd'); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 4216, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_12); __pyx_t_9 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_12); PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_12); __pyx_t_12 = 0; __Pyx_INCREF(__pyx_kp_u_to_extended_slice_of_size); __pyx_t_9 += 27; __Pyx_GIVEREF(__pyx_kp_u_to_extended_slice_of_size); PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_kp_u_to_extended_slice_of_size); /* … */ __pyx_t_11 = __Pyx_PyUnicode_Join(__pyx_t_3, 4, __pyx_t_9, __pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 4216, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4216, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 4216, __pyx_L17_error) } __pyx_L27:;
+4217: f' to extended slice of size {slice_size}')
__pyx_t_12 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_slice_size); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 4217, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_12); __pyx_t_11 = __Pyx_PyObject_FormatSimple(__pyx_t_12, __pyx_empty_unicode); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 4217, __pyx_L17_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; __pyx_t_10 = (__Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_11) > __pyx_t_10) ? __Pyx_PyUnicode_MAX_CHAR_VALUE(__pyx_t_11) : __pyx_t_10; __pyx_t_9 += __Pyx_PyUnicode_GET_LENGTH(__pyx_t_11); __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_3, 3, __pyx_t_11); __pyx_t_11 = 0;
4218: else:
4219: # Same size: overwrite existing
+4220: if not step:
/*else*/ { __pyx_t_1 = ((!(__pyx_v_step != 0)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L28; }
+4221: self.write_raw_(start, value_size, Block_At__(value_, 0), None)
__pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_raw_(__pyx_v_self, __pyx_v_start, __pyx_v_value_size, __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_value_, 0), ((PyObject*)Py_None)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4221, __pyx_L17_error)
4222: else:
+4223: CheckMulAddrU(step, value_size)
/*else*/ {
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckMulAddrU(__pyx_v_step, __pyx_v_value_size); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4223, __pyx_L17_error)
+4224: CheckAddAddrU(start, step * value_size)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_start, (__pyx_v_step * __pyx_v_value_size)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4224, __pyx_L17_error)
+4225: for offset in range(value_size):
__pyx_t_13 = __pyx_v_value_size; __pyx_t_14 = __pyx_t_13; for (__pyx_t_15 = 0; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { __pyx_v_offset = __pyx_t_15;
+4226: self.poke_(start + (step * offset), Block_Get__(value_, offset))
__pyx_t_16 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_(__pyx_v_self, (__pyx_v_start + (__pyx_v_step * __pyx_v_offset)), __pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_value_, __pyx_v_offset)); if (unlikely(__pyx_t_16 == ((int)-2))) __PYX_ERR(0, 4226, __pyx_L17_error)
}
}
__pyx_L28:;
}
__pyx_L20:;
}
4227: finally:
+4228: Block_Free(value_) # orphan
/*finally:*/ { /*normal exit:*/{ (void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_value_)); goto __pyx_L18; } __pyx_L17_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_22, &__pyx_t_23, &__pyx_t_24); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_19, &__pyx_t_20, &__pyx_t_21) < 0)) __Pyx_ErrFetch(&__pyx_t_19, &__pyx_t_20, &__pyx_t_21); __Pyx_XGOTREF(__pyx_t_19); __Pyx_XGOTREF(__pyx_t_20); __Pyx_XGOTREF(__pyx_t_21); __Pyx_XGOTREF(__pyx_t_22); __Pyx_XGOTREF(__pyx_t_23); __Pyx_XGOTREF(__pyx_t_24); __pyx_t_16 = __pyx_lineno; __pyx_t_17 = __pyx_clineno; __pyx_t_18 = __pyx_filename; { (void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_value_)); } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_22); __Pyx_XGIVEREF(__pyx_t_23); __Pyx_XGIVEREF(__pyx_t_24); __Pyx_ExceptionReset(__pyx_t_22, __pyx_t_23, __pyx_t_24); } __Pyx_XGIVEREF(__pyx_t_19); __Pyx_XGIVEREF(__pyx_t_20); __Pyx_XGIVEREF(__pyx_t_21); __Pyx_ErrRestore(__pyx_t_19, __pyx_t_20, __pyx_t_21); __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_lineno = __pyx_t_16; __pyx_clineno = __pyx_t_17; __pyx_filename = __pyx_t_18; goto __pyx_L1_error; } __pyx_L18:; }
4229: else:
4230: # below: self.poke(key, value)
+4231: address = <addr_t>key
/*else*/ { __pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4231, __pyx_L1_error) __pyx_v_address = ((addr_t)__pyx_t_4);
+4232: if value is None:
__pyx_t_1 = (__pyx_v_value == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L33; }
+4233: self.poke_none__(address)
__pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_none__(__pyx_v_self, __pyx_v_address); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4233, __pyx_L1_error)
4234: else:
+4235: if isinstance(value, int):
/*else*/ {
__pyx_t_2 = PyInt_Check(__pyx_v_value);
__pyx_t_1 = (__pyx_t_2 != 0);
if (__pyx_t_1) {
/* … */
goto __pyx_L34;
}
+4236: self.poke_(address, <byte_t>value)
__pyx_t_25 = __Pyx_PyInt_As_byte_t(__pyx_v_value); if (unlikely((__pyx_t_25 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4236, __pyx_L1_error) __pyx_t_17 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_(__pyx_v_self, __pyx_v_address, ((byte_t)__pyx_t_25)); if (unlikely(__pyx_t_17 == ((int)-2))) __PYX_ERR(0, 4236, __pyx_L1_error)
4237: else:
+4238: if len(value) != 1:
/*else*/ { __pyx_t_9 = PyObject_Length(__pyx_v_value); if (unlikely(__pyx_t_9 == ((Py_ssize_t)-1))) __PYX_ERR(0, 4238, __pyx_L1_error) __pyx_t_1 = ((__pyx_t_9 != 1) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+4239: raise ValueError('expecting single item')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__21, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4239, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 4239, __pyx_L1_error) /* … */ __pyx_tuple__21 = PyTuple_Pack(1, __pyx_kp_u_expecting_single_item); if (unlikely(!__pyx_tuple__21)) __PYX_ERR(0, 4239, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__21); __Pyx_GIVEREF(__pyx_tuple__21);
+4240: self.poke_(address, <byte_t>value[0])
__pyx_t_3 = __Pyx_GetItemInt(__pyx_v_value, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4240, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_25 = __Pyx_PyInt_As_byte_t(__pyx_t_3); if (unlikely((__pyx_t_25 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4240, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_17 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_(__pyx_v_self, __pyx_v_address, ((byte_t)__pyx_t_25)); if (unlikely(__pyx_t_17 == ((int)-2))) __PYX_ERR(0, 4240, __pyx_L1_error) } __pyx_L34:; } __pyx_L33:; } __pyx_L3:;
4241:
+4242: def __delitem__(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_51__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_50__delitem__[] = "Deletes data.\n\n Arguments:\n key (slice or int):\n Deletion range or address.\n\n Note:\n This method is not optimized for a :class:`slice` where its `step`\n is an integer greater than 1.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C | y | z]| | | | | | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> del memory[4:9]\n >>> memory._blocks\n [[1, b'ABCyz']]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C | D]| |[$]| |[x | z]| | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | D]| |[$]| |[x | z]| | | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | D]| | |[x]| | | | | | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> del memory[9]\n >>> memory._blocks\n [[1, b'ABCD'], [6, b'$'], [8, b'xz']]\n >>> del memory[3]\n >>> memory._blocks\n [[1, b'ABD'], [5, b'$'], [7, b'xz']]\n >>> del memory[2:10:3]\n >>> memory._blocks\n [[1, ""b'AD'], [5, b'x']]\n "; #if CYTHON_COMPILING_IN_CPYTHON struct wrapperbase __pyx_wrapperbase_10bytesparse_2_c_6Memory_50__delitem__; #endif static int __pyx_pw_10bytesparse_2_c_6Memory_51__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) { int __pyx_r; __Pyx_RefNannyDeclarations 
__Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_50__delitem__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_key)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_50__delitem__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_key) { PyObject *__pyx_v_key_ = 0; addr_t __pyx_v_start; addr_t __pyx_v_endex; addr_t __pyx_v_step; addr_t __pyx_v_address; PyObject *__pyx_v_key_start = NULL; PyObject *__pyx_v_key_endex = NULL; PyObject *__pyx_v_key_step = NULL; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__delitem__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.__delitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_key_); __Pyx_XDECREF(__pyx_v_key_start); __Pyx_XDECREF(__pyx_v_key_endex); __Pyx_XDECREF(__pyx_v_key_step); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4243: self: 'Memory',
4244: key: Union[Address, slice],
4245: ) -> None:
4246: r"""Deletes data.
4247:
4248: Arguments:
4249: key (slice or int):
4250: Deletion range or address.
4251:
4252: Note:
4253: This method is not optimized for a :class:`slice` where its `step`
4254: is an integer greater than 1.
4255:
4256: Examples:
4257: +---+---+---+---+---+---+---+---+---+---+---+---+
4258: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
4259: +===+===+===+===+===+===+===+===+===+===+===+===+
4260: | |[A | B | C | D]| |[$]| |[x | y | z]| |
4261: +---+---+---+---+---+---+---+---+---+---+---+---+
4262: | |[A | B | C | y | z]| | | | | | |
4263: +---+---+---+---+---+---+---+---+---+---+---+---+
4264:
4265: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
4266: >>> del memory[4:9]
4267: >>> memory._blocks
4268: [[1, b'ABCyz']]
4269:
4270: ~~~
4271:
4272: +---+---+---+---+---+---+---+---+---+---+---+---+
4273: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
4274: +===+===+===+===+===+===+===+===+===+===+===+===+
4275: | |[A | B | C | D]| |[$]| |[x | y | z]| |
4276: +---+---+---+---+---+---+---+---+---+---+---+---+
4277: | |[A | B | C | D]| |[$]| |[x | z]| | |
4278: +---+---+---+---+---+---+---+---+---+---+---+---+
4279: | |[A | B | D]| |[$]| |[x | z]| | | |
4280: +---+---+---+---+---+---+---+---+---+---+---+---+
4281: | |[A | D]| | |[x]| | | | | | |
4282: +---+---+---+---+---+---+---+---+---+---+---+---+
4283:
4284: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
4285: >>> del memory[9]
4286: >>> memory._blocks
4287: [[1, b'ABCD'], [6, b'$'], [8, b'xz']]
4288: >>> del memory[3]
4289: >>> memory._blocks
4290: [[1, b'ABD'], [5, b'$'], [7, b'xz']]
4291: >>> del memory[2:10:3]
4292: >>> memory._blocks
4293: [[1, b'AD'], [5, b'x']]
4294: """
4295: cdef:
4296: slice key_
4297: addr_t start
4298: addr_t endex
4299: addr_t step
4300: addr_t address
4301:
+4302: if Rack_Length(self._):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) != 0); if (__pyx_t_1) { /* … */ }
+4303: if isinstance(key, slice):
__pyx_t_1 = PySlice_Check(__pyx_v_key);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L4;
}
+4304: key_ = <slice>key
__pyx_t_3 = __pyx_v_key;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_ = ((PyObject*)__pyx_t_3);
__pyx_t_3 = 0;
+4305: key_start = key_.start
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->start;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_start = __pyx_t_3;
__pyx_t_3 = 0;
+4306: key_endex = key_.stop
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->stop;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_endex = __pyx_t_3;
__pyx_t_3 = 0;
+4307: start = self.start_() if key_start is None else <addr_t>key_start
__pyx_t_2 = (__pyx_v_key_start == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self); } else { __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_start); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4307, __pyx_L1_error) __pyx_t_4 = ((addr_t)__pyx_t_5); } __pyx_v_start = __pyx_t_4;
+4308: endex = self.endex_() if key_endex is None else <addr_t>key_endex
__pyx_t_2 = (__pyx_v_key_endex == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self); } else { __pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_endex); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4308, __pyx_L1_error) __pyx_t_4 = ((addr_t)__pyx_t_5); } __pyx_v_endex = __pyx_t_4;
4309:
+4310: if start < endex:
__pyx_t_2 = ((__pyx_v_start < __pyx_v_endex) != 0); if (__pyx_t_2) { /* … */ }
+4311: key_step = key_.step
__pyx_t_3 = ((PySliceObject*)__pyx_v_key_)->step;
__Pyx_INCREF(__pyx_t_3);
__pyx_v_key_step = __pyx_t_3;
__pyx_t_3 = 0;
+4312: if key_step is None or key_step is 1 or key_step == 1:
__pyx_t_1 = (__pyx_v_key_step == Py_None); __pyx_t_6 = (__pyx_t_1 != 0); if (!__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L7_bool_binop_done; } __pyx_t_6 = (__pyx_v_key_step == __pyx_int_1); __pyx_t_1 = (__pyx_t_6 != 0); if (!__pyx_t_1) { } else { __pyx_t_2 = __pyx_t_1; goto __pyx_L7_bool_binop_done; } __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_key_step, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4312, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 4312, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_2 = __pyx_t_1; __pyx_L7_bool_binop_done:; if (__pyx_t_2) { /* … */ goto __pyx_L6; }
+4313: self._erase_(start, endex, True, True) # delete
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 1, 1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 4313, __pyx_L1_error)
4314:
+4315: elif key_step > 1:
__pyx_t_3 = PyObject_RichCompare(__pyx_v_key_step, __pyx_int_1, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4315, __pyx_L1_error) __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 4315, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_2) { /* … */ } __pyx_L6:;
+4316: step = <addr_t>key_step - 1
__pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key_step); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4316, __pyx_L1_error) __pyx_v_step = (((addr_t)__pyx_t_4) - 1);
+4317: address = start
__pyx_v_address = __pyx_v_start;
+4318: while address < endex:
while (1) { __pyx_t_2 = ((__pyx_v_address < __pyx_v_endex) != 0); if (!__pyx_t_2) break;
+4319: self._erase_(address, address + 1, True, True) # delete
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_address, (__pyx_v_address + 1), 1, 1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 4319, __pyx_L1_error)
+4320: address += step
__pyx_v_address = (__pyx_v_address + __pyx_v_step);
+4321: endex -= 1
__pyx_v_endex = (__pyx_v_endex - 1); }
4322: else:
+4323: address = <addr_t>key
/*else*/ { __pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_key); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4323, __pyx_L1_error) __pyx_v_address = ((addr_t)__pyx_t_4);
+4324: self._erase_(address, address + 1, True, True) # delete
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_address, (__pyx_v_address + 1), 1, 1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 4324, __pyx_L1_error)
}
__pyx_L4:;
4325:
+4326: cdef vint append_(self, byte_t value) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_append_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, byte_t __pyx_v_value) { Rack_ *__pyx_v_blocks; size_t __pyx_v_block_count; Block_ *__pyx_v_block; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("append_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_9); __Pyx_AddTraceback("bytesparse._c.Memory.append_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4327: cdef:
+4328: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
4329: size_t block_count
4330: Block_* block
4331:
+4332: block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
+4333: if block_count:
__pyx_t_2 = (__pyx_v_block_count != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+4334: block = Block_Append(Rack_Last_(blocks), value)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Append(__pyx_f_10bytesparse_2_c_Rack_Last_(__pyx_v_blocks), __pyx_v_value); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 4334, __pyx_L1_error)
__pyx_v_block = __pyx_t_3;
+4335: Rack_Set__(blocks, block_count - 1, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, (__pyx_v_block_count - 1), __pyx_v_block));
4336: else:
+4337: block = Block_Create(0, 1, &value)
/*else*/ {
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Create(0, 1, (&__pyx_v_value)); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 4337, __pyx_L1_error)
__pyx_v_block = __pyx_t_3;
+4338: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L9_try_end; __pyx_L4_error:; /* … */ __Pyx_XGIVEREF(__pyx_t_4); __Pyx_XGIVEREF(__pyx_t_5); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_4, __pyx_t_5, __pyx_t_6); goto __pyx_L1_error; __pyx_L9_try_end:; } } __pyx_L3:;
+4339: self._ = blocks = Rack_Append(blocks, block)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Append(__pyx_v_blocks, __pyx_v_block); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 4339, __pyx_L4_error)
__pyx_v_self->_ = __pyx_t_1;
__pyx_v_blocks = __pyx_t_1;
+4340: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory.append_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9) < 0) __PYX_ERR(0, 4340, __pyx_L6_except_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_GOTREF(__pyx_t_8); __Pyx_GOTREF(__pyx_t_9);
+4341: Block_Free(block) # orphan
(void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block));
+4342: raise
__Pyx_GIVEREF(__pyx_t_7); __Pyx_GIVEREF(__pyx_t_8); __Pyx_XGIVEREF(__pyx_t_9); __Pyx_ErrRestoreWithState(__pyx_t_7, __pyx_t_8, __pyx_t_9); __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __PYX_ERR(0, 4342, __pyx_L6_except_error) } __pyx_L6_except_error:;
4343:
+4344: def append(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_53append(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_52append[] = "Memory.append(self: u'Memory', item: Union[AnyBytes, Value]) -> None\nAppends a single item.\n\n Arguments:\n item (int):\n Value to append. Can be a single byte string or integer.\n\n Examples:\n >>> memory = Memory()\n >>> memory.append(b'$')\n >>> memory._blocks\n [[0, b'$']]\n\n ~~~\n\n >>> memory = Memory()\n >>> memory.append(3)\n >>> memory._blocks\n [[0, b'\\x03']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_53append(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("append (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_52append(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_52append(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("append", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Memory.append", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4345: self: 'Memory',
4346: item: Union[AnyBytes, Value],
4347: ) -> None:
4348: r"""Appends a single item.
4349:
4350: Arguments:
4351: item (int):
4352: Value to append. Can be a single byte string or integer.
4353:
4354: Examples:
4355: >>> memory = Memory()
4356: >>> memory.append(b'$')
4357: >>> memory._blocks
4358: [[0, b'$']]
4359:
4360: ~~~
4361:
4362: >>> memory = Memory()
4363: >>> memory.append(3)
4364: >>> memory._blocks
4365: [[0, b'\x03']]
4366: """
4367:
+4368: if isinstance(item, int):
__pyx_t_1 = PyInt_Check(__pyx_v_item);
__pyx_t_2 = (__pyx_t_1 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L3;
}
+4369: self.append_(<byte_t>item)
__pyx_t_3 = __Pyx_PyInt_As_byte_t(__pyx_v_item); if (unlikely((__pyx_t_3 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4369, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->append_(__pyx_v_self, ((byte_t)__pyx_t_3)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4369, __pyx_L1_error)
4370: else:
+4371: if len(item) != 1:
/*else*/ { __pyx_t_5 = PyObject_Length(__pyx_v_item); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-1))) __PYX_ERR(0, 4371, __pyx_L1_error) __pyx_t_2 = ((__pyx_t_5 != 1) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+4372: raise ValueError('expecting single item')
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__21, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 4372, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 4372, __pyx_L1_error)
+4373: self.append_(<byte_t>item[0])
__pyx_t_6 = __Pyx_GetItemInt(__pyx_v_item, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 4373, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_3 = __Pyx_PyInt_As_byte_t(__pyx_t_6); if (unlikely((__pyx_t_3 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4373, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->append_(__pyx_v_self, ((byte_t)__pyx_t_3)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4373, __pyx_L1_error) } __pyx_L3:;
4374:
+4375: cdef vint extend_same_(self, Memory items, addr_t offset) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_extend_same_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_items, addr_t __pyx_v_offset) { addr_t __pyx_v_content_endex; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extend_same_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.extend_same_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4376: cdef:
+4377: addr_t content_endex = self.content_endex_()
__pyx_v_content_endex = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_endex_(__pyx_v_self);
4378:
+4379: CheckAddAddrU(content_endex, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_content_endex, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4379, __pyx_L1_error)
+4380: offset += content_endex
__pyx_v_offset = (__pyx_v_offset + __pyx_v_content_endex);
+4381: self.write_same_(offset, items, False, None)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_same_(__pyx_v_self, __pyx_v_offset, __pyx_v_items, 0, ((PyObject*)Py_None)); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4381, __pyx_L1_error)
4382:
+4383: cdef vint extend_raw_(self, size_t items_size, const byte_t* items_ptr, addr_t offset) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_extend_raw_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, size_t __pyx_v_items_size, byte_t const *__pyx_v_items_ptr, addr_t __pyx_v_offset) { addr_t __pyx_v_content_endex; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extend_raw_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.extend_raw_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4384: cdef:
+4385: addr_t content_endex = self.content_endex_()
__pyx_v_content_endex = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_endex_(__pyx_v_self);
4386:
+4387: CheckAddAddrU(content_endex, offset)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_content_endex, __pyx_v_offset); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4387, __pyx_L1_error)
+4388: offset += content_endex
__pyx_v_offset = (__pyx_v_offset + __pyx_v_content_endex);
+4389: CheckAddAddrU(offset, items_size)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_offset, __pyx_v_items_size); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4389, __pyx_L1_error)
+4390: self.write_raw_(offset, items_size, items_ptr, None)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_raw_(__pyx_v_self, __pyx_v_offset, __pyx_v_items_size, __pyx_v_items_ptr, ((PyObject*)Py_None)); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4390, __pyx_L1_error)
4391:
+4392: def extend(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_55extend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_54extend[] = "Memory.extend(self: u'Memory', items: Union[AnyBytes, u'Memory'], offset: Address = 0) -> None\nConcatenates items.\n\n Equivalent to ``self += items``.\n\n Arguments:\n items (items):\n Items to append at the end of the current virtual space.\n\n If a :obj:`list`, it is interpreted as a sequence of\n non-overlapping blocks, sorted by start address.\n\n offset (int):\n Optional offset w.r.t. :attr:`content_endex`.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_55extend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_items = 0; PyObject *__pyx_v_offset = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extend (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_items,&__pyx_n_s_offset,0}; PyObject* values[2] = {0,0}; values[1] = ((PyObject *)__pyx_int_0); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_items)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset); if (value) { values[1] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "extend") < 0)) __PYX_ERR(0, 4392, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { 
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_items = values[0]; __pyx_v_offset = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("extend", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 4392, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.extend", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_54extend(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_items, __pyx_v_offset); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_54extend(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_items, PyObject *__pyx_v_offset) { __Pyx_memviewslice __pyx_v_items_view = { 0, 0, { 0 }, { 0 }, { 0 } }; byte_t __pyx_v_items_value; size_t __pyx_v_items_size; byte_t const *__pyx_v_items_ptr; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extend", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __PYX_XDEC_MEMVIEW(&__pyx_t_7, 1); __Pyx_AddTraceback("bytesparse._c.Memory.extend", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_items_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4393: self: 'Memory',
4394: items: Union[AnyBytes, 'Memory'],
4395: offset: Address = 0,
4396: ) -> None:
4397: r"""Concatenates items.
4398:
4399: Equivalent to ``self += items``.
4400:
4401: Arguments:
4402: items (items):
4403: Items to append at the end of the current virtual space.
4404:
4405: If a :obj:`list`, it is interpreted as a sequence of
4406: non-overlapping blocks, sorted by start address.
4407:
4408: offset (int):
4409: Optional offset w.r.t. :attr:`content_endex`.
4410: """
4411: cdef:
4412: const byte_t[:] items_view
4413: byte_t items_value
4414: size_t items_size
4415: const byte_t* items_ptr
4416:
+4417: if offset < 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_offset, __pyx_int_0, Py_LT); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4417, __pyx_L1_error) __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 4417, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (unlikely(__pyx_t_2)) { /* … */ }
+4418: raise ValueError('negative extension offset')
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__22, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4418, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 4418, __pyx_L1_error) /* … */ __pyx_tuple__22 = PyTuple_Pack(1, __pyx_kp_u_negative_extension_offset); if (unlikely(!__pyx_tuple__22)) __PYX_ERR(0, 4418, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__22); __Pyx_GIVEREF(__pyx_tuple__22);
4419:
+4420: if isinstance(items, Memory):
__pyx_t_2 = __Pyx_TypeCheck(__pyx_v_items, __pyx_ptype_10bytesparse_2_c_Memory);
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {
/* … */
goto __pyx_L4;
}
+4421: self.extend_same_(items, <addr_t>offset)
if (!(likely(((__pyx_v_items) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_items, __pyx_ptype_10bytesparse_2_c_Memory))))) __PYX_ERR(0, 4421, __pyx_L1_error) __pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_offset); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4421, __pyx_L1_error) __pyx_t_5 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extend_same_(__pyx_v_self, ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_items), ((addr_t)__pyx_t_4)); if (unlikely(__pyx_t_5 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4421, __pyx_L1_error)
4422: else:
+4423: if isinstance(items, int):
/*else*/ {
__pyx_t_3 = PyInt_Check(__pyx_v_items);
__pyx_t_2 = (__pyx_t_3 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L5;
}
+4424: items_value = <byte_t>items
__pyx_t_6 = __Pyx_PyInt_As_byte_t(__pyx_v_items); if (unlikely((__pyx_t_6 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4424, __pyx_L1_error) __pyx_v_items_value = ((byte_t)__pyx_t_6);
+4425: items_size = 1
__pyx_v_items_size = 1;
+4426: items_ptr = &items_value
__pyx_v_items_ptr = (&__pyx_v_items_value);
4427: else:
+4428: items_view = items
/*else*/ { __pyx_t_7 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_items, 0); if (unlikely(!__pyx_t_7.memview)) __PYX_ERR(0, 4428, __pyx_L1_error) __pyx_v_items_view = __pyx_t_7; __pyx_t_7.memview = NULL; __pyx_t_7.data = NULL;
+4429: items_size = len(items_view)
__pyx_t_8 = __Pyx_MemoryView_Len(__pyx_v_items_view);
__pyx_v_items_size = __pyx_t_8;
4430: with cython.boundscheck(False):
+4431: items_ptr = &items_view[0]
__pyx_t_9 = 0; if (__pyx_t_9 < 0) __pyx_t_9 += __pyx_v_items_view.shape[0]; __pyx_v_items_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_items_view.data + __pyx_t_9 * __pyx_v_items_view.strides[0]) )))); } __pyx_L5:;
4432:
+4433: self.extend_raw_(items_size, items_ptr, <addr_t>offset)
__pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_offset); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4433, __pyx_L1_error) __pyx_t_5 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extend_raw_(__pyx_v_self, __pyx_v_items_size, __pyx_v_items_ptr, ((addr_t)__pyx_t_4)); if (unlikely(__pyx_t_5 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4433, __pyx_L1_error) } __pyx_L4:;
4434:
+4435: cdef int pop_last_(self) except -2:
static int __pyx_f_10bytesparse_2_c_6Memory_pop_last_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ *__pyx_v_blocks; size_t __pyx_v_block_count; Block_ *__pyx_v_block; byte_t __pyx_v_backup; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("pop_last_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.pop_last_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4436: cdef:
+4437: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
+4438: size_t block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
4439: Block_* block
4440: byte_t backup
4441:
+4442: if block_count:
__pyx_t_2 = (__pyx_v_block_count != 0); if (__pyx_t_2) { /* … */ }
+4443: block = Rack_Last_(blocks)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Last_(__pyx_v_blocks);
+4444: if Block_Length(block) > 1:
__pyx_t_2 = ((__pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block) > 1) != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+4445: block = Block_Pop__(block, &backup)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Block_Pop__(__pyx_v_block, (&__pyx_v_backup)); if (unlikely(__pyx_t_3 == ((Block_ *)NULL))) __PYX_ERR(0, 4445, __pyx_L1_error)
__pyx_v_block = __pyx_t_3;
+4446: Rack_Set__(blocks, block_count - 1, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, (__pyx_v_block_count - 1), __pyx_v_block));
4447: else:
+4448: backup = Block_Get__(block, 0)
/*else*/ { __pyx_v_backup = __pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_block, 0);
+4449: self._ = blocks = Rack_Pop__(blocks, NULL)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Pop__(__pyx_v_blocks, NULL); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 4449, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_1;
__pyx_v_blocks = __pyx_t_1;
}
__pyx_L4:;
+4450: return backup
__pyx_r = __pyx_v_backup; goto __pyx_L0;
4451: else:
+4452: return -1
/*else*/ { __pyx_r = -1; goto __pyx_L0; }
4453:
+4454: cdef int pop_at_(self, addr_t address) except -2:
static int __pyx_f_10bytesparse_2_c_6Memory_pop_at_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address) { int __pyx_v_backup; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("pop_at_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.pop_at_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4455: cdef:
4456: int backup
4457:
+4458: backup = self.peek_(address)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->peek_(__pyx_v_self, __pyx_v_address); if (unlikely(__pyx_t_1 == ((int)-2))) __PYX_ERR(0, 4458, __pyx_L1_error)
__pyx_v_backup = __pyx_t_1;
+4459: self._erase_(address, address + 1, True, True) # delete
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_address, (__pyx_v_address + 1), 1, 1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 4459, __pyx_L1_error)
+4460: return backup
__pyx_r = __pyx_v_backup; goto __pyx_L0;
4461:
+4462: def pop(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_57pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_56pop[] = "Memory.pop(self: u'Memory', address: Optional[Address] = None) -> Optional[Value]\nTakes a value away.\n\n Arguments:\n address (int):\n Address of the byte to pop.\n If ``None``, the very last byte is popped.\n\n Return:\n int: Value at `address`; ``None`` within emptiness.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C | D]| |[$]| |[x | y]| | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | D]| |[$]| |[x | y]| | | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> memory.pop() # -> ord('z') = 122\n 122\n >>> memory.pop(3) # -> ord('C') = 67\n 67\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_57pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_address = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("pop (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_address,0}; PyObject* values[1] = {0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_56pop(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address) { int __pyx_v_value; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("pop", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Memory.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; 
__pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4463: self: 'Memory',
+4464: address: Optional[Address] = None,
values[0] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_address); if (value) { values[0] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "pop") < 0)) __PYX_ERR(0, 4462, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_address = values[0]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("pop", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 4462, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_56pop(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_address);
4465: ) -> Optional[Value]:
4466: r"""Takes a value away.
4467:
4468: Arguments:
4469: address (int):
4470: Address of the byte to pop.
4471: If ``None``, the very last byte is popped.
4472:
4473: Return:
4474: int: Value at `address`; ``None`` within emptiness.
4475:
4476: Example:
4477: +---+---+---+---+---+---+---+---+---+---+---+---+
4478: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
4479: +===+===+===+===+===+===+===+===+===+===+===+===+
4480: | |[A | B | C | D]| |[$]| |[x | y | z]| |
4481: +---+---+---+---+---+---+---+---+---+---+---+---+
4482: | |[A | B | C | D]| |[$]| |[x | y]| | |
4483: +---+---+---+---+---+---+---+---+---+---+---+---+
4484: | |[A | B | D]| |[$]| |[x | y]| | | |
4485: +---+---+---+---+---+---+---+---+---+---+---+---+
4486:
4487: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
4488: >>> memory.pop() # -> ord('z') = 122
4489: 122
4490: >>> memory.pop(3) # -> ord('C') = 67
4491: 67
4492: """
4493: cdef:
4494: int value
4495:
+4496: if address is None:
__pyx_t_1 = (__pyx_v_address == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+4497: value = self.pop_last_()
__pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->pop_last_(__pyx_v_self); if (unlikely(__pyx_t_3 == ((int)-2))) __PYX_ERR(0, 4497, __pyx_L1_error)
__pyx_v_value = __pyx_t_3;
4498: else:
+4499: value = self.pop_at_(<addr_t>address)
/*else*/ { __pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4499, __pyx_L1_error) __pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->pop_at_(__pyx_v_self, ((addr_t)__pyx_t_4)); if (unlikely(__pyx_t_3 == ((int)-2))) __PYX_ERR(0, 4499, __pyx_L1_error) __pyx_v_value = __pyx_t_3; } __pyx_L3:;
+4500: return None if value < 0 else value
__Pyx_XDECREF(__pyx_r); if (((__pyx_v_value < 0) != 0)) { __Pyx_INCREF(Py_None); __pyx_t_5 = Py_None; } else { __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_value); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 4500, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_5 = __pyx_t_6; __pyx_t_6 = 0; } __pyx_r = __pyx_t_5; __pyx_t_5 = 0; goto __pyx_L0;
4501:
+4502: cdef BlockView _memview(self):
static struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_f_10bytesparse_2_c_6Memory__memview(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ *__pyx_v_blocks; size_t __pyx_v_block_count; addr_t __pyx_v_start; addr_t __pyx_v_endex; struct __pyx_obj_10bytesparse_2_c_BlockView *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_memview", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Memory._memview", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4503: cdef:
+4504: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
+4505: size_t block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
4506: addr_t start
4507: addr_t endex
4508:
+4509: if not block_count:
__pyx_t_2 = ((!(__pyx_v_block_count != 0)) != 0); if (__pyx_t_2) { /* … */ }
+4510: start = self._trim_start
__pyx_t_3 = __pyx_v_self->_trim_start; __pyx_v_start = __pyx_t_3;
+4511: endex = self._trim_endex
__pyx_t_3 = __pyx_v_self->_trim_endex; __pyx_v_endex = __pyx_t_3;
+4512: if self._trim_start_ and self._trim_endex_ and start < endex - 1:
__pyx_t_4 = (__pyx_v_self->_trim_start_ != 0); if (__pyx_t_4) { } else { __pyx_t_2 = __pyx_t_4; goto __pyx_L5_bool_binop_done; } __pyx_t_4 = (__pyx_v_self->_trim_endex_ != 0); if (__pyx_t_4) { } else { __pyx_t_2 = __pyx_t_4; goto __pyx_L5_bool_binop_done; } __pyx_t_4 = ((__pyx_v_start < (__pyx_v_endex - 1)) != 0); __pyx_t_2 = __pyx_t_4; __pyx_L5_bool_binop_done:; if (unlikely(__pyx_t_2)) { /* … */ }
+4513: raise ValueError('non-contiguous data within range')
__pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 4513, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 4513, __pyx_L1_error) /* … */ __pyx_tuple__23 = PyTuple_Pack(1, __pyx_kp_u_non_contiguous_data_within_range); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(0, 4513, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__23); __Pyx_GIVEREF(__pyx_tuple__23);
+4514: return Block_View(Block_Alloc(start, 0, False))
__Pyx_XDECREF(((PyObject *)__pyx_r)); __pyx_t_6 = __pyx_f_10bytesparse_2_c_Block_Alloc(__pyx_v_start, 0, 0); if (unlikely(__pyx_t_6 == ((Block_ *)NULL))) __PYX_ERR(0, 4514, __pyx_L1_error) __pyx_t_5 = ((PyObject *)__pyx_f_10bytesparse_2_c_Block_View(__pyx_t_6)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 4514, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_r = ((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_t_5); __pyx_t_5 = 0; goto __pyx_L0;
4515:
+4516: elif block_count == 1:
__pyx_t_2 = ((__pyx_v_block_count == 1) != 0); if (likely(__pyx_t_2)) { /* … */ }
+4517: start = self._trim_start
__pyx_t_3 = __pyx_v_self->_trim_start; __pyx_v_start = __pyx_t_3;
+4518: if self._trim_start_:
__pyx_t_2 = (__pyx_v_self->_trim_start_ != 0); if (__pyx_t_2) { /* … */ }
+4519: if start != Block_Start(Rack_First__(blocks)):
__pyx_t_2 = ((__pyx_v_start != __pyx_f_10bytesparse_2_c_Block_Start(__pyx_f_10bytesparse_2_c_Rack_First__(__pyx_v_blocks))) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+4520: raise ValueError('non-contiguous data within range')
__pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 4520, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 4520, __pyx_L1_error)
4521:
+4522: endex = self._trim_endex
__pyx_t_3 = __pyx_v_self->_trim_endex; __pyx_v_endex = __pyx_t_3;
+4523: if self._trim_endex_:
__pyx_t_2 = (__pyx_v_self->_trim_endex_ != 0); if (__pyx_t_2) { /* … */ }
+4524: if endex != Block_Endex(Rack_Last__(blocks)):
__pyx_t_2 = ((__pyx_v_endex != __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_blocks))) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+4525: raise ValueError('non-contiguous data within range')
__pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 4525, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 4525, __pyx_L1_error)
4526:
+4527: return Block_View(Rack_First_(blocks))
__Pyx_XDECREF(((PyObject *)__pyx_r)); __pyx_t_5 = ((PyObject *)__pyx_f_10bytesparse_2_c_Block_View(__pyx_f_10bytesparse_2_c_Rack_First_(__pyx_v_blocks))); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 4527, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_r = ((struct __pyx_obj_10bytesparse_2_c_BlockView *)__pyx_t_5); __pyx_t_5 = 0; goto __pyx_L0;
4528:
4529: else:
+4530: raise ValueError('non-contiguous data within range')
/*else*/ { __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 4530, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 4530, __pyx_L1_error) }
4531:
+4532: def __bytes__(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_59__bytes__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_58__bytes__[] = "Memory.__bytes__(self: u'Memory') -> bytes\nCreates a bytes clone.\n\n Returns:\n :obj:`bytes`: Cloned data.\n\n Raises:\n :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_59__bytes__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bytes__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_58__bytes__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_58__bytes__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__bytes__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Memory.__bytes__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4533: self: 'Memory',
4534: ) -> bytes:
4535: r"""Creates a bytes clone.
4536:
4537: Returns:
4538: :obj:`bytes`: Cloned data.
4539:
4540: Raises:
4541: :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).
4542: """
4543:
+4544: return bytes(self._memview())
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_memview(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4544, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4544, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L0;
4545:
+4546: def to_bytes(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_61to_bytes(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_60to_bytes[] = "Memory.to_bytes(self: u'Memory') -> bytes\nCreates a bytes clone.\n\n Returns:\n :obj:`bytes`: Cloned data.\n\n Raises:\n :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_61to_bytes(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("to_bytes (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_60to_bytes(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_60to_bytes(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("to_bytes", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Memory.to_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4547: self: 'Memory',
4548: ) -> bytes:
4549: r"""Creates a bytes clone.
4550:
4551: Returns:
4552: :obj:`bytes`: Cloned data.
4553:
4554: Raises:
4555: :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).
4556: """
4557:
+4558: return bytes(self._memview())
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_memview(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4558, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4558, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L0;
4559:
+4560: def to_bytearray(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_63to_bytearray(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_62to_bytearray[] = "Memory.to_bytearray(self: u'Memory') -> bytearray\nCreates a bytearray clone.\n\n Arguments:\n copy (bool):\n Creates a clone of the underlying :obj:`bytearray` data\n structure.\n\n Returns:\n :obj:`bytearray`: Cloned data.\n\n Raises:\n :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_63to_bytearray(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("to_bytearray (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_62to_bytearray(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_62to_bytearray(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("to_bytearray", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Memory.to_bytearray", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4561: self: 'Memory',
4562: ) -> bytearray:
4563: r"""Creates a bytearray clone.
4564:
4565: Arguments:
4566: copy (bool):
4567: Creates a clone of the underlying :obj:`bytearray` data
4568: structure.
4569:
4570: Returns:
4571: :obj:`bytearray`: Cloned data.
4572:
4573: Raises:
4574: :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).
4575: """
4576:
+4577: return bytearray(self._memview())
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_memview(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4577, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4577, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L0;
4578:
+4579: def to_memoryview(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_65to_memoryview(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_64to_memoryview[] = "Memory.to_memoryview(self: u'Memory') -> memoryview\nCreates a memory view.\n\n Returns:\n :obj:`memoryview`: View over data.\n\n Raises:\n :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_65to_memoryview(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("to_memoryview (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_64to_memoryview(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_64to_memoryview(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("to_memoryview", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.to_memoryview", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4580: self: 'Memory',
4581: ) -> memoryview:
4582: r"""Creates a memory view.
4583:
4584: Returns:
4585: :obj:`memoryview`: View over data.
4586:
4587: Raises:
4588: :obj:`ValueError`: Data not contiguous (see :attr:`contiguous`).
4589: """
4590:
+4591: return self._memview()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_memview(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4591, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
4592:
+4593: cdef Memory copy_(self):
static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_f_10bytesparse_2_c_6Memory_copy_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory = 0; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("copy_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.copy_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_memory); __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4594: cdef:
+4595: Memory memory = Memory()
__pyx_t_1 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10bytesparse_2_c_Memory)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4595, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_1); __pyx_t_1 = 0;
4596:
+4597: memory._ = Rack_Free(memory._)
__pyx_v_memory->_ = __pyx_f_10bytesparse_2_c_Rack_Free(__pyx_v_memory->_);
+4598: memory._ = Rack_Copy(self._)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Copy(__pyx_v_self->_); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 4598, __pyx_L1_error)
__pyx_v_memory->_ = __pyx_t_2;
4599:
+4600: memory._trim_start = self._trim_start
__pyx_t_3 = __pyx_v_self->_trim_start; __pyx_v_memory->_trim_start = __pyx_t_3;
+4601: memory._trim_endex = self._trim_endex
__pyx_t_3 = __pyx_v_self->_trim_endex; __pyx_v_memory->_trim_endex = __pyx_t_3;
+4602: memory._trim_start_ = self._trim_start_
__pyx_t_4 = __pyx_v_self->_trim_start_; __pyx_v_memory->_trim_start_ = __pyx_t_4;
+4603: memory._trim_endex_ = self._trim_endex_
__pyx_t_4 = __pyx_v_self->_trim_endex_; __pyx_v_memory->_trim_endex_ = __pyx_t_4;
4604:
+4605: return memory
__Pyx_XDECREF(((PyObject *)__pyx_r)); __Pyx_INCREF(((PyObject *)__pyx_v_memory)); __pyx_r = __pyx_v_memory; goto __pyx_L0;
4606:
+4607: def __copy__(
/* Python wrapper */ static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pw_10bytesparse_2_c_6Memory_67__copy__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_66__copy__[] = "Memory.__copy__(self: u'Memory') -> u'Memory'\nCreates a shallow copy.\n\n Note:\n The Cython implementation actually creates a deep copy.\n\n Returns:\n :obj:`Memory`: Shallow copy.\n "; static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pw_10bytesparse_2_c_6Memory_67__copy__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__copy__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_66__copy__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pf_10bytesparse_2_c_6Memory_66__copy__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__copy__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.__copy__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4608: self: 'Memory',
4609: ) -> 'Memory':
4610: r"""Creates a shallow copy.
4611:
4612: Note:
4613: The Cython implementation actually creates a deep copy.
4614:
4615: Returns:
4616: :obj:`Memory`: Shallow copy.
4617: """
4618:
+4619: return self.copy_()
__Pyx_XDECREF(((PyObject *)__pyx_r)); __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->copy_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4619, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_1); __pyx_t_1 = 0; goto __pyx_L0;
4620:
+4621: def __deepcopy__(
/* Python wrapper */ static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pw_10bytesparse_2_c_6Memory_69__deepcopy__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_68__deepcopy__[] = "Memory.__deepcopy__(self: u'Memory') -> u'Memory'\nCreates a deep copy.\n\n Returns:\n :obj:`Memory`: Deep copy.\n "; static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pw_10bytesparse_2_c_6Memory_69__deepcopy__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__deepcopy__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_68__deepcopy__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pf_10bytesparse_2_c_6Memory_68__deepcopy__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__deepcopy__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.__deepcopy__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4622: self: 'Memory',
4623: ) -> 'Memory':
4624: r"""Creates a deep copy.
4625:
4626: Returns:
4627: :obj:`Memory`: Deep copy.
4628: """
4629:
+4630: return self.copy_()
__Pyx_XDECREF(((PyObject *)__pyx_r)); __pyx_t_1 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->copy_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4630, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_1); __pyx_t_1 = 0; goto __pyx_L0;
4631:
4632: @property
+4633: def contiguous(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_10contiguous_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_10contiguous_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_10contiguous___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_10contiguous___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("bytesparse._c.Memory.contiguous.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4634: self: 'Memory',
4635: ) -> bool:
4636: r"""bool: Contains contiguous data.
4637:
4638: The memory is considered to have contiguous data if there is no empty
4639: space between blocks.
4640:
4641: If trimming is defined, there must be no empty space also towards it.
4642: """
4643:
+4644: try:
{ /*try:*/ { /* … */ } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L1_error; __pyx_L7_try_return:; __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L0; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_1); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); goto __pyx_L0; }
+4645: self._memview()
__pyx_t_4 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_memview(__pyx_v_self)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 4645, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+4646: return True
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(Py_True); __pyx_r = Py_True; goto __pyx_L7_try_return;
+4647: except ValueError:
__pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_ValueError); if (__pyx_t_5) { __Pyx_AddTraceback("bytesparse._c.Memory.contiguous.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 4647, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7);
+4648: return False
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(Py_False); __pyx_r = Py_False; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; goto __pyx_L6_except_return; } goto __pyx_L5_except_error; __pyx_L5_except_error:;
4649:
4650: @property
+4651: def trim_start(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_10trim_start_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_10trim_start_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_10trim_start___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_10trim_start___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Memory.trim_start.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4652: self: 'Memory',
4653: ) -> Optional[Address]:
4654: r"""int: Trimming start address.
4655:
4656: Any data before this address is automatically discarded.
4657: Disabled if ``None``.
4658: """
4659:
+4660: return self._trim_start if self._trim_start_ else None
__Pyx_XDECREF(__pyx_r); if ((__pyx_v_self->_trim_start_ != 0)) { __pyx_t_2 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_start); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4660, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = __pyx_t_2; __pyx_t_2 = 0; } else { __Pyx_INCREF(Py_None); __pyx_t_1 = Py_None; } __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
4661:
4662: @trim_start.setter
+4663: def trim_start(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_10trim_start_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_trim_start); /*proto*/ static int __pyx_pw_10bytesparse_2_c_6Memory_10trim_start_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_trim_start) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_10trim_start_2__set__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_trim_start)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_10trim_start_2__set__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_trim_start) { addr_t __pyx_v_trim_start_; addr_t __pyx_v_trim_endex_; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__set__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.trim_start.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4664: self: 'Memory',
4665: trim_start: Address,
4666: ) -> None:
4667: cdef:
4668: addr_t trim_start_
4669: addr_t trim_endex_
4670:
+4671: if trim_start is None:
__pyx_t_1 = (__pyx_v_trim_start == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+4672: trim_start_ = 0
__pyx_v_trim_start_ = 0;
+4673: self._trim_start_ = False
__pyx_v_self->_trim_start_ = 0;
4674: else:
+4675: trim_start_ = <addr_t>trim_start
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_trim_start); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4675, __pyx_L1_error) __pyx_v_trim_start_ = ((addr_t)__pyx_t_3);
+4676: self._trim_start_ = True
__pyx_v_self->_trim_start_ = 1; } __pyx_L3:;
4677:
+4678: trim_endex_ = self._trim_endex
__pyx_t_3 = __pyx_v_self->_trim_endex; __pyx_v_trim_endex_ = __pyx_t_3;
+4679: if self._trim_start_ and self._trim_endex_ and trim_endex_ < trim_start_:
__pyx_t_1 = (__pyx_v_self->_trim_start_ != 0); if (__pyx_t_1) { } else { __pyx_t_2 = __pyx_t_1; goto __pyx_L5_bool_binop_done; } __pyx_t_1 = (__pyx_v_self->_trim_endex_ != 0); if (__pyx_t_1) { } else { __pyx_t_2 = __pyx_t_1; goto __pyx_L5_bool_binop_done; } __pyx_t_1 = ((__pyx_v_trim_endex_ < __pyx_v_trim_start_) != 0); __pyx_t_2 = __pyx_t_1; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+4680: self._trim_endex = trim_endex_ = trim_start_
__pyx_v_self->_trim_endex = __pyx_v_trim_start_; __pyx_v_trim_endex_ = __pyx_v_trim_start_;
4681:
+4682: self._trim_start = trim_start_
__pyx_v_self->_trim_start = __pyx_v_trim_start_;
+4683: if self._trim_start_:
__pyx_t_2 = (__pyx_v_self->_trim_start_ != 0); if (__pyx_t_2) { /* … */ }
+4684: self._crop_(trim_start_, trim_endex_, None)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_trim_start_, __pyx_v_trim_endex_, ((PyObject*)Py_None)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4684, __pyx_L1_error)
4685:
4686: @property
+4687: def trim_endex(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_10trim_endex_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_10trim_endex_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_10trim_endex___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_10trim_endex___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("bytesparse._c.Memory.trim_endex.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4688: self: 'Memory',
4689: ) -> Optional[Address]:
4690: r"""int: Trimming exclusive end address.
4691:
4692: Any data at or after this address is automatically discarded.
4693: Disabled if ``None``.
4694: """
4695:
+4696: return self._trim_endex if self._trim_endex_ else None
__Pyx_XDECREF(__pyx_r); if ((__pyx_v_self->_trim_endex_ != 0)) { __pyx_t_2 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_endex); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4696, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = __pyx_t_2; __pyx_t_2 = 0; } else { __Pyx_INCREF(Py_None); __pyx_t_1 = Py_None; } __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
4697:
4698: @trim_endex.setter
+4699: def trim_endex(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_10trim_endex_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_trim_endex); /*proto*/ static int __pyx_pw_10bytesparse_2_c_6Memory_10trim_endex_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_trim_endex) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_10trim_endex_2__set__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_trim_endex)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_10trim_endex_2__set__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_trim_endex) { addr_t __pyx_v_trim_start_; addr_t __pyx_v_trim_endex_; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__set__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.trim_endex.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4700: self: 'Memory',
4701: trim_endex: Address,
4702: ) -> None:
4703: cdef:
4704: addr_t trim_start_
4705: addr_t trim_endex_
4706:
+4707: if trim_endex is None:
__pyx_t_1 = (__pyx_v_trim_endex == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L3; }
+4708: trim_endex_ = ADDR_MAX
__pyx_v_trim_endex_ = ADDR_MAX;
+4709: self._trim_endex_ = False
__pyx_v_self->_trim_endex_ = 0;
4710: else:
+4711: trim_endex_ = <addr_t>trim_endex
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_trim_endex); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4711, __pyx_L1_error) __pyx_v_trim_endex_ = ((addr_t)__pyx_t_3);
+4712: self._trim_endex_ = True
__pyx_v_self->_trim_endex_ = 1; } __pyx_L3:;
4713:
+4714: trim_start_ = self._trim_start
__pyx_t_3 = __pyx_v_self->_trim_start; __pyx_v_trim_start_ = __pyx_t_3;
+4715: if self._trim_start_ and self._trim_endex_ and trim_endex_ < trim_start_:
__pyx_t_1 = (__pyx_v_self->_trim_start_ != 0); if (__pyx_t_1) { } else { __pyx_t_2 = __pyx_t_1; goto __pyx_L5_bool_binop_done; } __pyx_t_1 = (__pyx_v_self->_trim_endex_ != 0); if (__pyx_t_1) { } else { __pyx_t_2 = __pyx_t_1; goto __pyx_L5_bool_binop_done; } __pyx_t_1 = ((__pyx_v_trim_endex_ < __pyx_v_trim_start_) != 0); __pyx_t_2 = __pyx_t_1; __pyx_L5_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+4716: self._trim_start = trim_start_ = trim_endex_
__pyx_v_self->_trim_start = __pyx_v_trim_endex_; __pyx_v_trim_start_ = __pyx_v_trim_endex_;
4717:
+4718: self._trim_endex = trim_endex_
__pyx_v_self->_trim_endex = __pyx_v_trim_endex_;
+4719: if self._trim_endex_:
__pyx_t_2 = (__pyx_v_self->_trim_endex_ != 0); if (__pyx_t_2) { /* … */ }
+4720: self._crop_(trim_start_, trim_endex_, None)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_trim_start_, __pyx_v_trim_endex_, ((PyObject*)Py_None)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4720, __pyx_L1_error)
4721:
4722: @property
+4723: def trim_span(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_9trim_span_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_9trim_span_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_9trim_span___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_9trim_span___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.trim_span.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4724: self: 'Memory',
4725: ) -> OpenInterval:
4726: r"""tuple of int: Trimming span addresses.
4727:
4728: A :obj:`tuple` holding :attr:`trim_start` and :attr:`trim_endex`.
4729: """
4730:
+4731: return (self._trim_start if self._trim_start_ else None,
__Pyx_XDECREF(__pyx_r); if ((__pyx_v_self->_trim_start_ != 0)) { __pyx_t_2 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_start); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4731, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = __pyx_t_2; __pyx_t_2 = 0; } else { __Pyx_INCREF(Py_None); __pyx_t_1 = Py_None; } /* … */ __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4731, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); __pyx_t_1 = 0; __pyx_t_2 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
+4732: self._trim_endex if self._trim_endex_ else None)
if ((__pyx_v_self->_trim_endex_ != 0)) { __pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_endex); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4732, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __pyx_t_3; __pyx_t_3 = 0; } else { __Pyx_INCREF(Py_None); __pyx_t_2 = Py_None; }
4733:
4734: @trim_span.setter
+4735: def trim_span(
/* Python wrapper */ static int __pyx_pw_10bytesparse_2_c_6Memory_9trim_span_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_span); /*proto*/ static int __pyx_pw_10bytesparse_2_c_6Memory_9trim_span_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_span) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_9trim_span_2__set__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_span)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_10bytesparse_2_c_6Memory_9trim_span_2__set__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_span) { PyObject *__pyx_v_trim_start = NULL; PyObject *__pyx_v_trim_endex = NULL; addr_t __pyx_v_trim_start_; addr_t __pyx_v_trim_endex_; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__set__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.trim_span.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_trim_start); __Pyx_XDECREF(__pyx_v_trim_endex); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4736: self: 'Memory',
4737: span: OpenInterval,
4738: ) -> None:
4739:
+4740: trim_start, trim_endex = span
if ((likely(PyTuple_CheckExact(__pyx_v_span))) || (PyList_CheckExact(__pyx_v_span))) { PyObject* sequence = __pyx_v_span; Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); if (unlikely(size != 2)) { if (size > 2) __Pyx_RaiseTooManyValuesError(2); else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); __PYX_ERR(0, 4740, __pyx_L1_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); __pyx_t_2 = PyTuple_GET_ITEM(sequence, 1); } else { __pyx_t_1 = PyList_GET_ITEM(sequence, 0); __pyx_t_2 = PyList_GET_ITEM(sequence, 1); } __Pyx_INCREF(__pyx_t_1); __Pyx_INCREF(__pyx_t_2); #else __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4740, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4740, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); #endif } else { Py_ssize_t index = -1; __pyx_t_3 = PyObject_GetIter(__pyx_v_span); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4740, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = Py_TYPE(__pyx_t_3)->tp_iternext; index = 0; __pyx_t_1 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_1)) goto __pyx_L3_unpacking_failed; __Pyx_GOTREF(__pyx_t_1); index = 1; __pyx_t_2 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_2)) goto __pyx_L3_unpacking_failed; __Pyx_GOTREF(__pyx_t_2); if (__Pyx_IternextUnpackEndCheck(__pyx_t_4(__pyx_t_3), 2) < 0) __PYX_ERR(0, 4740, __pyx_L1_error) __pyx_t_4 = NULL; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; goto __pyx_L4_unpacking_done; __pyx_L3_unpacking_failed:; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_4 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); __PYX_ERR(0, 4740, __pyx_L1_error) __pyx_L4_unpacking_done:; } __pyx_v_trim_start = __pyx_t_1; __pyx_t_1 = 0; __pyx_v_trim_endex = __pyx_t_2; __pyx_t_2 = 0;
4741:
+4742: if trim_start is None:
__pyx_t_5 = (__pyx_v_trim_start == Py_None); __pyx_t_6 = (__pyx_t_5 != 0); if (__pyx_t_6) { /* … */ goto __pyx_L5; }
+4743: trim_start_ = 0
__pyx_v_trim_start_ = 0;
+4744: self._trim_start_ = False
__pyx_v_self->_trim_start_ = 0;
4745: else:
+4746: trim_start_ = <addr_t>trim_start
/*else*/ { __pyx_t_7 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_trim_start); if (unlikely((__pyx_t_7 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4746, __pyx_L1_error) __pyx_v_trim_start_ = ((addr_t)__pyx_t_7);
+4747: self._trim_start_ = True
__pyx_v_self->_trim_start_ = 1; } __pyx_L5:;
4748:
+4749: if trim_endex is None:
__pyx_t_6 = (__pyx_v_trim_endex == Py_None); __pyx_t_5 = (__pyx_t_6 != 0); if (__pyx_t_5) { /* … */ goto __pyx_L6; }
+4750: trim_endex_ = ADDR_MAX
__pyx_v_trim_endex_ = ADDR_MAX;
+4751: self._trim_endex_ = False
__pyx_v_self->_trim_endex_ = 0;
4752: else:
+4753: trim_endex_ = <addr_t>trim_endex
/*else*/ { __pyx_t_7 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_trim_endex); if (unlikely((__pyx_t_7 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 4753, __pyx_L1_error) __pyx_v_trim_endex_ = ((addr_t)__pyx_t_7);
+4754: self._trim_endex_ = True
__pyx_v_self->_trim_endex_ = 1; } __pyx_L6:;
4755:
+4756: if self._trim_start_ and self._trim_endex_ and trim_endex_ < trim_start_:
__pyx_t_6 = (__pyx_v_self->_trim_start_ != 0); if (__pyx_t_6) { } else { __pyx_t_5 = __pyx_t_6; goto __pyx_L8_bool_binop_done; } __pyx_t_6 = (__pyx_v_self->_trim_endex_ != 0); if (__pyx_t_6) { } else { __pyx_t_5 = __pyx_t_6; goto __pyx_L8_bool_binop_done; } __pyx_t_6 = ((__pyx_v_trim_endex_ < __pyx_v_trim_start_) != 0); __pyx_t_5 = __pyx_t_6; __pyx_L8_bool_binop_done:; if (__pyx_t_5) { /* … */ }
+4757: trim_endex_ = trim_start_
__pyx_v_trim_endex_ = __pyx_v_trim_start_;
4758:
+4759: self._trim_start = trim_start_
__pyx_v_self->_trim_start = __pyx_v_trim_start_;
+4760: self._trim_endex = trim_endex_
__pyx_v_self->_trim_endex = __pyx_v_trim_endex_;
+4761: if self._trim_start_ or self._trim_endex_:
__pyx_t_6 = (__pyx_v_self->_trim_start_ != 0); if (!__pyx_t_6) { } else { __pyx_t_5 = __pyx_t_6; goto __pyx_L12_bool_binop_done; } __pyx_t_6 = (__pyx_v_self->_trim_endex_ != 0); __pyx_t_5 = __pyx_t_6; __pyx_L12_bool_binop_done:; if (__pyx_t_5) { /* … */ }
+4762: self._crop_(trim_start_, trim_endex_, None)
__pyx_t_8 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_trim_start_, __pyx_v_trim_endex_, ((PyObject*)Py_None)); if (unlikely(__pyx_t_8 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 4762, __pyx_L1_error)
4763:
+4764: cdef addr_t start_(self):
static addr_t __pyx_f_10bytesparse_2_c_6Memory_start_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("start_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4765: cdef:
4766: const Rack_* blocks
4767:
+4768: if not self._trim_start_:
__pyx_t_1 = ((!(__pyx_v_self->_trim_start_ != 0)) != 0); if (__pyx_t_1) { /* … */ }
4769: # Return actual
+4770: blocks = self._
__pyx_t_2 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_2;
+4771: if Rack_Length(blocks):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); if (__pyx_t_1) { /* … */ }
+4772: return Block_Start(Rack_First__(blocks))
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_f_10bytesparse_2_c_Rack_First__(__pyx_v_blocks)); goto __pyx_L0;
4773: else:
+4774: return 0
/*else*/ { __pyx_r = 0; goto __pyx_L0; }
4775: else:
+4776: return self._trim_start
/*else*/ { __pyx_r = __pyx_v_self->_trim_start; goto __pyx_L0; }
4777:
4778: @property
+4779: def start(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_5start_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_5start_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_5start___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_5start___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.start.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4780: self: 'Memory',
4781: ) -> Address:
4782: r"""int: Inclusive start address.
4783:
4784: This property holds the inclusive start address of the virtual space.
4785: By default, it is the current minimum inclusive start address of
4786: the first stored block.
4787:
4788: If :attr:`trim_start` not ``None``, that is returned.
4789:
4790: If the memory has no data and no trimming, 0 is returned.
4791:
4792: Examples:
4793: >>> Memory().start
4794: 0
4795:
4796: ~~~
4797:
4798: +---+---+---+---+---+---+---+---+---+
4799: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
4800: +===+===+===+===+===+===+===+===+===+
4801: | |[A | B | C]| |[x | y | z]| |
4802: +---+---+---+---+---+---+---+---+---+
4803:
4804: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
4805: >>> memory.start
4806: 1
4807:
4808: ~~~
4809:
4810: +---+---+---+---+---+---+---+---+---+
4811: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
4812: +===+===+===+===+===+===+===+===+===+
4813: | |[[[| | | |[x | y | z]| |
4814: +---+---+---+---+---+---+---+---+---+
4815:
4816: >>> memory = Memory(blocks=[[5, b'xyz']], start=1)
4817: >>> memory.start
4818: 1
4819: """
4820:
+4821: return self.start_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4821, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
4822:
+4823: cdef addr_t endex_(self):
static addr_t __pyx_f_10bytesparse_2_c_6Memory_endex_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("endex_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4824: cdef:
4825: const Rack_* blocks
4826:
+4827: if not self._trim_endex_:
__pyx_t_1 = ((!(__pyx_v_self->_trim_endex_ != 0)) != 0); if (__pyx_t_1) { /* … */ }
4828: # Return actual
+4829: blocks = self._
__pyx_t_2 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_2;
+4830: if Rack_Length(blocks):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); if (__pyx_t_1) { /* … */ }
+4831: return Block_Endex(Rack_Last__(blocks))
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_blocks)); goto __pyx_L0;
4832: else:
+4833: return self.start_()
/*else*/ { __pyx_r = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self); goto __pyx_L0; }
4834: else:
+4835: return self._trim_endex
/*else*/ { __pyx_r = __pyx_v_self->_trim_endex; goto __pyx_L0; }
4836:
4837: @property
+4838: def endex(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_5endex_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_5endex_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_5endex___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_5endex___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.endex.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4839: self: 'Memory',
4840: ) -> Address:
4841: r"""int: Exclusive end address.
4842:
4843: This property holds the exclusive end address of the virtual space.
4844: By default, it is the current maximmum exclusive end address of
4845: the last stored block.
4846:
4847: If :attr:`trim_endex` not ``None``, that is returned.
4848:
4849: If the memory has no data and no trimming, :attr:`start` is returned.
4850:
4851: Examples:
4852: >>> Memory().endex
4853: 0
4854:
4855: ~~~
4856:
4857: +---+---+---+---+---+---+---+---+---+
4858: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
4859: +===+===+===+===+===+===+===+===+===+
4860: | |[A | B | C]| |[x | y | z]| |
4861: +---+---+---+---+---+---+---+---+---+
4862:
4863: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
4864: >>> memory.endex
4865: 8
4866:
4867: ~~~
4868:
4869: +---+---+---+---+---+---+---+---+---+
4870: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
4871: +===+===+===+===+===+===+===+===+===+
4872: | |[A | B | C]| | | | |)))|
4873: +---+---+---+---+---+---+---+---+---+
4874:
4875: >>> memory = Memory(blocks=[[1, b'ABC']], endex=8)
4876: >>> memory.endex
4877: 8
4878: """
4879:
+4880: return self.endex_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4880, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
4881:
+4882: cdef (addr_t, addr_t) span_(self):
static __pyx_ctuple_addr_t__and_addr_t __pyx_f_10bytesparse_2_c_6Memory_span_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { __pyx_ctuple_addr_t__and_addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("span_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+4883: return self.start_(), self.endex_()
__pyx_t_1.f0 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self); __pyx_t_1.f1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->endex_(__pyx_v_self); __pyx_r = __pyx_t_1; goto __pyx_L0;
4884:
4885: @property
+4886: def span(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_4span_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_4span_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_4span___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_4span___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.span.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4887: self: 'Memory',
4888: ) -> ClosedInterval:
4889: r"""tuple of int: Memory address span.
4890:
4891: A :obj:`tuple` holding both :attr:`start` and :attr:`endex`.
4892:
4893: Examples:
4894: >>> Memory().span
4895: (0, 0)
4896: >>> Memory(start=1, endex=8).span
4897: (1, 8)
4898:
4899: ~~~
4900:
4901: +---+---+---+---+---+---+---+---+---+
4902: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
4903: +===+===+===+===+===+===+===+===+===+
4904: | |[A | B | C]| |[x | y | z]| |
4905: +---+---+---+---+---+---+---+---+---+
4906:
4907: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
4908: >>> memory.span
4909: (1, 8)
4910: """
4911:
+4912: return self.span_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __pyx_convert__to_py___pyx_ctuple_addr_t__and_addr_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->span_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4912, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
4913:
4914: @property
+4915: def endin(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_5endin_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_5endin_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_5endin___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_5endin___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Memory.endin.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4916: self: 'Memory',
4917: ) -> Address:
4918: r"""int: Inclusive end address.
4919:
4920: This property holds the inclusive end address of the virtual space.
4921: By default, it is the current maximmum inclusive end address of
4922: the last stored block.
4923:
4924: If :attr:`trim_endex` not ``None``, that minus one is returned.
4925:
4926: If the memory has no data and no trimming, :attr:`start` is returned.
4927:
4928: Examples:
4929: >>> Memory().endin
4930: -1
4931:
4932: ~~~
4933:
4934: +---+---+---+---+---+---+---+---+---+
4935: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
4936: +===+===+===+===+===+===+===+===+===+
4937: | |[A | B | C]| |[x | y | z]| |
4938: +---+---+---+---+---+---+---+---+---+
4939:
4940: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
4941: >>> memory.endin
4942: 7
4943:
4944: ~~~
4945:
4946: +---+---+---+---+---+---+---+---+---+
4947: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
4948: +===+===+===+===+===+===+===+===+===+
4949: | |[A | B | C]| | | | |)))|
4950: +---+---+---+---+---+---+---+---+---+
4951:
4952: >>> memory = Memory(blocks=[[1, b'ABC']], endex=8)
4953: >>> memory.endin
4954: 7
4955: """
4956: cdef:
4957: const Rack_* blocks
4958:
+4959: if not self._trim_endex_:
__pyx_t_1 = ((!(__pyx_v_self->_trim_endex_ != 0)) != 0); if (__pyx_t_1) { /* … */ }
4960: # Return actual
+4961: blocks = self._
__pyx_t_2 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_2;
+4962: if Rack_Length(blocks):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); if (__pyx_t_1) { /* … */ }
+4963: return <object>Block_Endex(Rack_Last__(blocks)) - 1
__Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_blocks))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4963, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyInt_SubtractObjC(__pyx_t_3, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 4963, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0;
4964: else:
+4965: return self.start - 1
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_start); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 4965, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = __Pyx_PyInt_SubtractObjC(__pyx_t_4, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4965, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; }
4966: else:
+4967: return <object>self._trim_endex - 1
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_endex); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 4967, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyInt_SubtractObjC(__pyx_t_3, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 4967, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0; }
4968:
+4969: cdef addr_t content_start_(self):
static addr_t __pyx_f_10bytesparse_2_c_6Memory_content_start_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("content_start_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
4970: cdef:
+4971: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
4972:
+4973: if Rack_Length(blocks):
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); if (__pyx_t_2) { /* … */ }
+4974: return Block_Start(Rack_First__(blocks))
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_f_10bytesparse_2_c_Rack_First__(__pyx_v_blocks)); goto __pyx_L0;
+4975: elif not self._trim_start_:
__pyx_t_2 = ((!(__pyx_v_self->_trim_start_ != 0)) != 0); if (__pyx_t_2) { /* … */ }
+4976: return 0
__pyx_r = 0; goto __pyx_L0;
4977: else:
+4978: return self._trim_start
/*else*/ { __pyx_r = __pyx_v_self->_trim_start; goto __pyx_L0; }
4979:
4980: @property
+4981: def content_start(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_start_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_start_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_13content_start___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_13content_start___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.content_start.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
4982: self: 'Memory',
4983: ) -> Address:
4984: r"""int: Inclusive content start address.
4985:
4986: This property holds the inclusive start address of the memory content.
4987: By default, it is the current minimum inclusive start address of
4988: the first stored block.
4989:
4990: If the memory has no data and no trimming, 0 is returned.
4991:
4992: Trimming is considered only for an empty memory.
4993:
4994: Examples:
4995: >>> Memory().content_start
4996: 0
4997: >>> Memory(start=1).content_start
4998: 1
4999: >>> Memory(start=1, endex=8).content_start
5000: 1
5001:
5002: ~~~
5003:
5004: +---+---+---+---+---+---+---+---+---+
5005: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5006: +===+===+===+===+===+===+===+===+===+
5007: | |[A | B | C]| |[x | y | z]| |
5008: +---+---+---+---+---+---+---+---+---+
5009:
5010: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
5011: >>> memory.content_start
5012: 1
5013:
5014: ~~~
5015:
5016: +---+---+---+---+---+---+---+---+---+
5017: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5018: +===+===+===+===+===+===+===+===+===+
5019: | |[[[| | | |[x | y | z]| |
5020: +---+---+---+---+---+---+---+---+---+
5021:
5022: >>> memory = Memory(blocks=[[5, b'xyz']], start=1)
5023: >>> memory.content_start
5024: 5
5025: """
5026:
+5027: return self.content_start_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_start_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5027, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
5028:
+5029: cdef addr_t content_endex_(self):
static addr_t __pyx_f_10bytesparse_2_c_6Memory_content_endex_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("content_endex_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5030: cdef:
+5031: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5032:
+5033: if Rack_Length(blocks):
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); if (__pyx_t_2) { /* … */ }
+5034: return Block_Endex(Rack_Last__(blocks))
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_blocks)); goto __pyx_L0;
+5035: elif not self._trim_start_:
__pyx_t_2 = ((!(__pyx_v_self->_trim_start_ != 0)) != 0); if (__pyx_t_2) { /* … */ }
+5036: return 0 # default to start
__pyx_r = 0; goto __pyx_L0;
5037: else:
+5038: return self._trim_start # default to start
/*else*/ { __pyx_r = __pyx_v_self->_trim_start; goto __pyx_L0; }
5039:
5040: @property
+5041: def content_endex(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_endex_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_endex_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_13content_endex___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_13content_endex___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.content_endex.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5042: self: 'Memory',
5043: ) -> Address:
5044: r"""int: Exclusive content end address.
5045:
5046: This property holds the exclusive end address of the memory content.
5047: By default, it is the current maximmum exclusive end address of
5048: the last stored block.
5049:
5050: If the memory has no data and no trimming, :attr:`start` is returned.
5051:
5052: Trimming is considered only for an empty memory.
5053:
5054: Examples:
5055: >>> Memory().content_endex
5056: 0
5057: >>> Memory(endex=8).content_endex
5058: 0
5059: >>> Memory(start=1, endex=8).content_endex
5060: 1
5061:
5062: ~~~
5063:
5064: +---+---+---+---+---+---+---+---+---+
5065: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5066: +===+===+===+===+===+===+===+===+===+
5067: | |[A | B | C]| |[x | y | z]| |
5068: +---+---+---+---+---+---+---+---+---+
5069:
5070: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
5071: >>> memory.content_endex
5072: 8
5073:
5074: ~~~
5075:
5076: +---+---+---+---+---+---+---+---+---+
5077: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5078: +===+===+===+===+===+===+===+===+===+
5079: | |[A | B | C]| | | | |)))|
5080: +---+---+---+---+---+---+---+---+---+
5081:
5082: >>> memory = Memory(blocks=[[1, b'ABC']], endex=8)
5083: >>> memory.content_endex
5084: 4
5085: """
5086:
+5087: return self.content_endex_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_endex_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5087, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
5088:
+5089: cdef (addr_t, addr_t) content_span_(self):
static __pyx_ctuple_addr_t__and_addr_t __pyx_f_10bytesparse_2_c_6Memory_content_span_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { __pyx_ctuple_addr_t__and_addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("content_span_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+5090: return self.content_start_(), self.content_endex_()
__pyx_t_1.f0 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_start_(__pyx_v_self); __pyx_t_1.f1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_endex_(__pyx_v_self); __pyx_r = __pyx_t_1; goto __pyx_L0;
5091:
5092: @property
+5093: def content_span(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_12content_span_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_12content_span_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_12content_span___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_12content_span___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.content_span.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5094: self: 'Memory',
5095: ) -> ClosedInterval:
5096: r"""tuple of int: Memory content address span.
5097:
5098: A :attr:`tuple` holding both :attr:`content_start` and
5099: :attr:`content_endex`.
5100:
5101: Examples:
5102: >>> Memory().content_span
5103: (0, 0)
5104: >>> Memory(start=1).content_span
5105: (1, 1)
5106: >>> Memory(endex=8).content_span
5107: (0, 0)
5108: >>> Memory(start=1, endex=8).content_span
5109: (1, 1)
5110:
5111: ~~~
5112:
5113: +---+---+---+---+---+---+---+---+---+
5114: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5115: +===+===+===+===+===+===+===+===+===+
5116: | |[A | B | C]| |[x | y | z]| |
5117: +---+---+---+---+---+---+---+---+---+
5118:
5119: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
5120: >>> memory.content_span
5121: (1, 8)
5122: """
5123:
+5124: return self.content_span_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __pyx_convert__to_py___pyx_ctuple_addr_t__and_addr_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_span_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5124, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
5125:
5126: @property
+5127: def content_endin(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_endin_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_endin_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_13content_endin___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_13content_endin___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Memory.content_endin.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5128: self: 'Memory',
5129: ) -> Address:
5130: r"""int: Inclusive content end address.
5131:
5132: This property holds the inclusive end address of the memory content.
5133: By default, it is the current maximum inclusive end address of
5134: the last stored block.
5135:
5136: If the memory has no data and no trimming, :attr:`start` minus one is
5137: returned.
5138:
5139: Trimming is considered only for an empty memory.
5140:
5141: Examples:
5142: >>> Memory().content_endin
5143: -1
5144: >>> Memory(endex=8).content_endin
5145: -1
5146: >>> Memory(start=1, endex=8).content_endin
5147: 0
5148:
5149: ~~~
5150:
5151: +---+---+---+---+---+---+---+---+---+
5152: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5153: +===+===+===+===+===+===+===+===+===+
5154: | |[A | B | C]| |[x | y | z]| |
5155: +---+---+---+---+---+---+---+---+---+
5156:
5157: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
5158: >>> memory.content_endin
5159: 7
5160:
5161: ~~~
5162:
5163: +---+---+---+---+---+---+---+---+---+
5164: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5165: +===+===+===+===+===+===+===+===+===+
5166: | |[A | B | C]| | | | |)))|
5167: +---+---+---+---+---+---+---+---+---+
5168:
5169: >>> memory = Memory(blocks=[[1, b'ABC']], endex=8)
5170: >>> memory.content_endin
5171: 3
5172: """
5173: cdef:
+5174: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5175:
+5176: if Rack_Length(blocks):
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); if (__pyx_t_2) { /* … */ }
+5177: return <object>Block_Endex(Rack_Last__(blocks)) - 1
__Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_f_10bytesparse_2_c_Block_Endex(__pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_blocks))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 5177, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyInt_SubtractObjC(__pyx_t_3, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 5177, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0;
+5178: elif not self._trim_start_: # default to start-1
__pyx_t_2 = ((!(__pyx_v_self->_trim_start_ != 0)) != 0); if (__pyx_t_2) { /* … */ }
+5179: return -1
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_int_neg_1); __pyx_r = __pyx_int_neg_1; goto __pyx_L0;
5180: else:
+5181: return <object>self._trim_start - 1 # default to start-1
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_start); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 5181, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = __Pyx_PyInt_SubtractObjC(__pyx_t_4, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 5181, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; }
5182:
+5183: cdef addr_t content_size_(self):
static addr_t __pyx_f_10bytesparse_2_c_6Memory_content_size_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; addr_t __pyx_v_content_size; addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("content_size_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5184: cdef:
+5185: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5186: size_t block_index
5187: const Block_* block
+5188: addr_t content_size = 0
__pyx_v_content_size = 0;
5189:
+5190: for block_index in range(Rack_Length(blocks)):
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_3 = __pyx_t_2; for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) { __pyx_v_block_index = __pyx_t_4;
+5191: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+5192: content_size += Block_Length(block)
__pyx_v_content_size = (__pyx_v_content_size + __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block)); }
+5193: return content_size
__pyx_r = __pyx_v_content_size; goto __pyx_L0;
5194:
5195: @property
+5196: def content_size(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_12content_size_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_12content_size_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_12content_size___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_12content_size___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.content_size.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5197: self: 'Memory',
5198: ) -> Address:
5199: r"""Actual content size.
5200:
5201: Returns:
5202: int: The sum of all block lengths.
5203:
5204: Examples:
5205: >>> Memory().content_size
5206: 0
5207: >>> Memory(start=1, endex=8).content_size
5208: 0
5209:
5210: ~~~
5211:
5212: +---+---+---+---+---+---+---+---+---+
5213: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5214: +===+===+===+===+===+===+===+===+===+
5215: | |[A | B | C]| |[x | y | z]| |
5216: +---+---+---+---+---+---+---+---+---+
5217:
5218: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
5219: >>> memory.content_size
5220: 6
5221:
5222: ~~~
5223:
5224: +---+---+---+---+---+---+---+---+---+
5225: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5226: +===+===+===+===+===+===+===+===+===+
5227: | |[A | B | C]| | | | |)))|
5228: +---+---+---+---+---+---+---+---+---+
5229:
5230: >>> memory = Memory(blocks=[[1, b'ABC']], endex=8)
5231: >>> memory.content_size
5232: 3
5233: """
5234:
+5235: return self.content_size_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_From_uint_fast64_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_size_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5235, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
5236:
+5237: cdef size_t content_parts_(self):
static size_t __pyx_f_10bytesparse_2_c_6Memory_content_parts_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { size_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("content_parts_", 0); /* … */ /* function exit code */ __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+5238: return Rack_Length(self._)
__pyx_r = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_); goto __pyx_L0;
5239:
5240: @property
+5241: def content_parts(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_parts_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_13content_parts_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_13content_parts___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_13content_parts___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.content_parts.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5242: self: 'Memory',
5243: ) -> int:
5244: r"""Number of blocks.
5245:
5246: Returns:
5247: int: The number of blocks.
5248:
5249: Examples:
5250: >>> Memory().content_parts
5251: 0
5252:
5253: ~~~
5254:
5255: +---+---+---+---+---+---+---+---+---+
5256: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5257: +===+===+===+===+===+===+===+===+===+
5258: | |[A | B | C]| |[x | y | z]| |
5259: +---+---+---+---+---+---+---+---+---+
5260:
5261: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
5262: >>> memory.content_parts
5263: 2
5264:
5265: ~~~
5266:
5267: +---+---+---+---+---+---+---+---+---+
5268: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5269: +===+===+===+===+===+===+===+===+===+
5270: | |[A | B | C]| | | | |)))|
5271: +---+---+---+---+---+---+---+---+---+
5272:
5273: >>> memory = Memory(blocks=[[1, b'ABC']], endex=8)
5274: >>> memory.content_parts
5275: 1
5276: """
5277:
+5278: return self.content_parts_()
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_FromSize_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->content_parts_(__pyx_v_self)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5278, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
5279:
+5280: cdef vint validate_(self) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_validate_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_count; addr_t __pyx_v_start; addr_t __pyx_v_endex; addr_t __pyx_v_previous_endex; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("validate_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Memory.validate_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5281: cdef:
+5282: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
+5283: size_t block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
5284:
5285: addr_t start
5286: addr_t endex
+5287: addr_t previous_endex = 0
__pyx_v_previous_endex = 0;
5288:
5289: size_t block_index
5290: const Block_* block
5291: addr_t block_start
5292: addr_t block_endex
5293:
+5294: start, endex = self.bound_(None, None)
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, Py_None, Py_None); __pyx_t_3 = __pyx_t_2.f0; __pyx_t_4 = __pyx_t_2.f1; __pyx_v_start = __pyx_t_3; __pyx_v_endex = __pyx_t_4;
+5295: block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
5296:
+5297: if block_count:
__pyx_t_5 = (__pyx_v_block_count != 0); if (__pyx_t_5) { /* … */ goto __pyx_L3; }
+5298: if endex <= start:
__pyx_t_5 = ((__pyx_v_endex <= __pyx_v_start) != 0); if (unlikely(__pyx_t_5)) { /* … */ }
+5299: raise ValueError('invalid bounds')
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__24, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 5299, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 5299, __pyx_L1_error) /* … */ __pyx_tuple__24 = PyTuple_Pack(1, __pyx_kp_u_invalid_bounds); if (unlikely(!__pyx_tuple__24)) __PYX_ERR(0, 5299, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__24); __Pyx_GIVEREF(__pyx_tuple__24);
5300:
+5301: for block_index in range(block_count):
__pyx_t_7 = __pyx_v_block_count; __pyx_t_8 = __pyx_t_7; for (__pyx_t_9 = 0; __pyx_t_9 < __pyx_t_8; __pyx_t_9+=1) { __pyx_v_block_index = __pyx_t_9;
+5302: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+5303: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+5304: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
5305:
+5306: if block_index: # skip first
__pyx_t_5 = (__pyx_v_block_index != 0); if (__pyx_t_5) { /* … */ }
+5307: if block_start <= previous_endex:
__pyx_t_5 = ((__pyx_v_block_start <= __pyx_v_previous_endex) != 0); if (unlikely(__pyx_t_5)) { /* … */ }
+5308: raise ValueError('invalid block interleaving')
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__25, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 5308, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 5308, __pyx_L1_error) /* … */ __pyx_tuple__25 = PyTuple_Pack(1, __pyx_kp_u_invalid_block_interleaving); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 5308, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__25); __Pyx_GIVEREF(__pyx_tuple__25);
5309:
+5310: if block_endex <= block_start:
__pyx_t_5 = ((__pyx_v_block_endex <= __pyx_v_block_start) != 0); if (unlikely(__pyx_t_5)) { /* … */ }
+5311: raise ValueError('invalid block data size')
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 5311, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 5311, __pyx_L1_error)
5312:
+5313: if block_start < start or endex < block_endex:
__pyx_t_10 = ((__pyx_v_block_start < __pyx_v_start) != 0); if (!__pyx_t_10) { } else { __pyx_t_5 = __pyx_t_10; goto __pyx_L11_bool_binop_done; } __pyx_t_10 = ((__pyx_v_endex < __pyx_v_block_endex) != 0); __pyx_t_5 = __pyx_t_10; __pyx_L11_bool_binop_done:; if (unlikely(__pyx_t_5)) { /* … */ }
+5314: raise ValueError('invalid block bounds')
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__26, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 5314, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 5314, __pyx_L1_error) /* … */ __pyx_tuple__26 = PyTuple_Pack(1, __pyx_kp_u_invalid_block_bounds); if (unlikely(!__pyx_tuple__26)) __PYX_ERR(0, 5314, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__26); __Pyx_GIVEREF(__pyx_tuple__26);
5315:
+5316: previous_endex = block_endex
__pyx_v_previous_endex = __pyx_v_block_endex; }
5317:
5318: else:
+5319: if endex < start:
/*else*/ { __pyx_t_5 = ((__pyx_v_endex < __pyx_v_start) != 0); if (unlikely(__pyx_t_5)) { /* … */ } } __pyx_L3:;
+5320: raise ValueError('invalid bounds')
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__24, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 5320, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 5320, __pyx_L1_error)
5321:
+5322: def validate(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_71validate(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_70validate[] = "Memory.validate(self: u'Memory') -> None\nValidates internal structure.\n\n It makes sure that all the allocated blocks are sorted by block start\n address, and that all the blocks are non-overlapping.\n\n Raises:\n :obj:`ValueError`: Invalid data detected (see exception message).\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_71validate(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("validate (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_70validate(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_70validate(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("validate", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.validate", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5323: self: 'Memory',
5324: ) -> None:
5325: r"""Validates internal structure.
5326:
5327: It makes sure that all the allocated blocks are sorted by block start
5328: address, and that all the blocks are non-overlapping.
5329:
5330: Raises:
5331: :obj:`ValueError`: Invalid data detected (see exception message).
5332: """
5333:
+5334: self.validate_()
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->validate_(__pyx_v_self); if (unlikely(__pyx_t_1 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5334, __pyx_L1_error)
5335:
+5336: cdef (addr_t, addr_t) bound_(self, object start, object endex):
static __pyx_ctuple_addr_t__and_addr_t __pyx_f_10bytesparse_2_c_6Memory_bound_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { addr_t __pyx_v_trim_start; addr_t __pyx_v_trim_endex; addr_t __pyx_v_start_; addr_t __pyx_v_endex_; __pyx_ctuple_addr_t__and_addr_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("bound_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_WriteUnraisable("bytesparse._c.Memory.bound_", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); __Pyx_pretend_to_initialize(&__pyx_r); __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5337: cdef:
5338: addr_t trim_start
5339: addr_t trim_endex
+5340: addr_t start_ = 0 if start is None else <addr_t>start
__pyx_t_2 = (__pyx_v_start == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_1 = 0; } else { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_start); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5340, __pyx_L1_error) __pyx_t_1 = ((addr_t)__pyx_t_3); } __pyx_v_start_ = __pyx_t_1;
+5341: addr_t endex_ = start_ if endex is None else <addr_t>endex
__pyx_t_2 = (__pyx_v_endex == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_1 = __pyx_v_start_; } else { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_endex); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5341, __pyx_L1_error) __pyx_t_1 = ((addr_t)__pyx_t_3); } __pyx_v_endex_ = __pyx_t_1;
5342:
+5343: trim_start = self._trim_start
__pyx_t_1 = __pyx_v_self->_trim_start; __pyx_v_trim_start = __pyx_t_1;
+5344: trim_endex = self._trim_endex
__pyx_t_1 = __pyx_v_self->_trim_endex; __pyx_v_trim_endex = __pyx_t_1;
5345:
+5346: if start is None:
__pyx_t_2 = (__pyx_v_start == Py_None); __pyx_t_4 = (__pyx_t_2 != 0); if (__pyx_t_4) { /* … */ goto __pyx_L3; }
+5347: if not self._trim_start_:
__pyx_t_4 = ((!(__pyx_v_self->_trim_start_ != 0)) != 0); if (__pyx_t_4) { /* … */ goto __pyx_L4; }
+5348: if Rack_Length(self._):
__pyx_t_4 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) != 0); if (__pyx_t_4) { /* … */ goto __pyx_L5; }
+5349: start_ = Block_Start(Rack_First__(self._))
__pyx_v_start_ = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_f_10bytesparse_2_c_Rack_First__(__pyx_v_self->_));
5350: else:
+5351: start_ = 0
/*else*/ { __pyx_v_start_ = 0; } __pyx_L5:;
5352: else:
+5353: start_ = trim_start
/*else*/ { __pyx_v_start_ = __pyx_v_trim_start; } __pyx_L4:;
5354: else:
+5355: if self._trim_start_:
/*else*/ { __pyx_t_4 = (__pyx_v_self->_trim_start_ != 0); if (__pyx_t_4) { /* … */ }
+5356: if start_ < trim_start:
__pyx_t_4 = ((__pyx_v_start_ < __pyx_v_trim_start) != 0); if (__pyx_t_4) { /* … */ }
+5357: start_ = trim_start
__pyx_v_start_ = __pyx_v_trim_start;
+5358: if endex is not None:
__pyx_t_4 = (__pyx_v_endex != Py_None); __pyx_t_2 = (__pyx_t_4 != 0); if (__pyx_t_2) { /* … */ } } __pyx_L3:;
+5359: if endex_ < start_:
__pyx_t_2 = ((__pyx_v_endex_ < __pyx_v_start_) != 0); if (__pyx_t_2) { /* … */ }
+5360: endex_ = start_
__pyx_v_endex_ = __pyx_v_start_;
5361:
+5362: if endex is None:
__pyx_t_2 = (__pyx_v_endex == Py_None); __pyx_t_4 = (__pyx_t_2 != 0); if (__pyx_t_4) { /* … */ goto __pyx_L10; }
+5363: if not self._trim_endex_:
__pyx_t_4 = ((!(__pyx_v_self->_trim_endex_ != 0)) != 0); if (__pyx_t_4) { /* … */ goto __pyx_L11; }
+5364: if Rack_Length(self._):
__pyx_t_4 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) != 0); if (__pyx_t_4) { /* … */ goto __pyx_L12; }
+5365: endex_ = Block_Endex(Rack_Last__(self._))
__pyx_v_endex_ = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_self->_));
5366: else:
+5367: endex_ = start_
/*else*/ { __pyx_v_endex_ = __pyx_v_start_; } __pyx_L12:;
5368: else:
+5369: endex_ = trim_endex
/*else*/ { __pyx_v_endex_ = __pyx_v_trim_endex; } __pyx_L11:;
5370: else:
+5371: if self._trim_endex_:
/*else*/ { __pyx_t_4 = (__pyx_v_self->_trim_endex_ != 0); if (__pyx_t_4) { /* … */ }
+5372: if endex_ > trim_endex:
__pyx_t_4 = ((__pyx_v_endex_ > __pyx_v_trim_endex) != 0); if (__pyx_t_4) { /* … */ }
+5373: endex_ = trim_endex
__pyx_v_endex_ = __pyx_v_trim_endex;
+5374: if start is not None:
__pyx_t_4 = (__pyx_v_start != Py_None); __pyx_t_2 = (__pyx_t_4 != 0); if (__pyx_t_2) { /* … */ } } __pyx_L10:;
+5375: if start_ > endex_:
__pyx_t_2 = ((__pyx_v_start_ > __pyx_v_endex_) != 0); if (__pyx_t_2) { /* … */ }
+5376: start_ = endex_
__pyx_v_start_ = __pyx_v_endex_;
5377:
+5378: return start_, endex_
__pyx_t_5.f0 = __pyx_v_start_; __pyx_t_5.f1 = __pyx_v_endex_; __pyx_r = __pyx_t_5; goto __pyx_L0;
5379:
+5380: def bound(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_73bound(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_72bound[] = "Memory.bound(self: u'Memory', start: Optional[Address], endex: Optional[Address]) -> ClosedInterval\nBounds addresses.\n\n It bounds the given addresses to stay within memory limits.\n ``None`` is used to ignore a limit for the `start` or `endex`\n directions.\n\n In case of stored data, :attr:`content_start` and\n :attr:`content_endex` are used as bounds.\n\n In case of trimming limits, :attr:`trim_start` or :attr:`trim_endex`\n are used as bounds, when not ``None``.\n\n In case `start` and `endex` are in the wrong order, one clamps\n the other if present (see the Python implementation for details).\n\n Returns:\n tuple of int: Bounded `start` and `endex`, closed interval.\n\n Examples:\n >>> Memory().bound()\n (0, 0)\n >>> Memory().bound(endex=100)\n (0, 0)\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |\n +===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])\n >>> memory.bound(0, 30)\n (1, 8)\n >>> memory.bound(2, 6)\n (2, 6)\n >>> memory.bound(endex=6)\n (1, 6)\n >>> memory.bound(start=2)\n (2, 8)\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |\n +===+===+===+===+===+===+===+===+===+\n | |[[[| |[A | B | C]| | |)))|\n +---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[3, b'ABC']], start=1, endex=8)\n >>> memory.bound()\n (1, 8)\n >>> memory.bound(0, 30)\n (1, 8)\n >>> memory.bound(2, 6)\n (2, 6)\n "" >>> memory.bound(start=2)\n (2, 8)\n >>> memory.bound(endex=6)\n (1, 6)\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_73bound(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; 
PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("bound (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("bound", 1, 2, 2, 1); __PYX_ERR(0, 5380, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "bound") < 0)) __PYX_ERR(0, 5380, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("bound", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 5380, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.bound", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_72bound(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code 
*/ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_72bound(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("bound", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.bound", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5381: self: 'Memory',
5382: start: Optional[Address],
5383: endex: Optional[Address],
5384: ) -> ClosedInterval:
5385: r"""Bounds addresses.
5386:
5387: It bounds the given addresses to stay within memory limits.
5388: ``None`` is used to ignore a limit for the `start` or `endex`
5389: directions.
5390:
5391: In case of stored data, :attr:`content_start` and
5392: :attr:`content_endex` are used as bounds.
5393:
5394: In case of trimming limits, :attr:`trim_start` or :attr:`trim_endex`
5395: are used as bounds, when not ``None``.
5396:
5397: In case `start` and `endex` are in the wrong order, one clamps
5398: the other if present (see the Python implementation for details).
5399:
5400: Returns:
5401: tuple of int: Bounded `start` and `endex`, closed interval.
5402:
5403: Examples:
5404: >>> Memory().bound()
5405: (0, 0)
5406: >>> Memory().bound(endex=100)
5407: (0, 0)
5408:
5409: ~~~
5410:
5411: +---+---+---+---+---+---+---+---+---+
5412: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5413: +===+===+===+===+===+===+===+===+===+
5414: | |[A | B | C]| |[x | y | z]| |
5415: +---+---+---+---+---+---+---+---+---+
5416:
5417: >>> memory = Memory(blocks=[[1, b'ABC'], [5, b'xyz']])
5418: >>> memory.bound(0, 30)
5419: (1, 8)
5420: >>> memory.bound(2, 6)
5421: (2, 6)
5422: >>> memory.bound(endex=6)
5423: (1, 6)
5424: >>> memory.bound(start=2)
5425: (2, 8)
5426:
5427: ~~~
5428:
5429: +---+---+---+---+---+---+---+---+---+
5430: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
5431: +===+===+===+===+===+===+===+===+===+
5432: | |[[[| |[A | B | C]| | |)))|
5433: +---+---+---+---+---+---+---+---+---+
5434:
5435: >>> memory = Memory(blocks=[[3, b'ABC']], start=1, endex=8)
5436: >>> memory.bound()
5437: (1, 8)
5438: >>> memory.bound(0, 30)
5439: (1, 8)
5440: >>> memory.bound(2, 6)
5441: (2, 6)
5442: >>> memory.bound(start=2)
5443: (2, 8)
5444: >>> memory.bound(endex=6)
5445: (1, 6)
5446: """
5447:
+5448: return self.bound_(start, endex)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __pyx_convert__to_py___pyx_ctuple_addr_t__and_addr_t(((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5448, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
5449:
+5450: def _block_index_at(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_75_block_index_at(PyObject *__pyx_v_self, PyObject *__pyx_v_address); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_74_block_index_at[] = "Memory._block_index_at(self: u'Memory', address: Address) -> Optional[BlockIndex]\nLocates the block enclosing an address.\n\n Returns the index of the block enclosing the given address.\n\n Arguments:\n address (int):\n Address of the target item.\n\n Returns:\n int: Block index if found, ``None`` otherwise.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | | 0 | 0 | 0 | 0 | | 1 | | 2 | 2 | 2 | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> [memory._block_index_at(i) for i in range(12)]\n [None, 0, 0, 0, 0, None, 1, None, 2, 2, 2, None]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_75_block_index_at(PyObject *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_block_index_at (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_74_block_index_at(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_address)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_74_block_index_at(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address) { Py_ssize_t __pyx_v_block_index; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_block_index_at", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Memory._block_index_at", __pyx_clineno, 
__pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5451: self: 'Memory',
5452: address: Address,
5453: ) -> Optional[BlockIndex]:
5454: r"""Locates the block enclosing an address.
5455:
5456: Returns the index of the block enclosing the given address.
5457:
5458: Arguments:
5459: address (int):
5460: Address of the target item.
5461:
5462: Returns:
5463: int: Block index if found, ``None`` otherwise.
5464:
5465: Example:
5466: +---+---+---+---+---+---+---+---+---+---+---+---+
5467: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
5468: +===+===+===+===+===+===+===+===+===+===+===+===+
5469: | |[A | B | C | D]| |[$]| |[x | y | z]| |
5470: +---+---+---+---+---+---+---+---+---+---+---+---+
5471: | | 0 | 0 | 0 | 0 | | 1 | | 2 | 2 | 2 | |
5472: +---+---+---+---+---+---+---+---+---+---+---+---+
5473:
5474: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
5475: >>> [memory._block_index_at(i) for i in range(12)]
5476: [None, 0, 0, 0, 0, None, 1, None, 2, 2, 2, None]
5477: """
5478: cdef:
5479: ssize_t block_index
5480:
+5481: block_index = Rack_IndexAt(self._, address)
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5481, __pyx_L1_error) __pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_IndexAt(__pyx_v_self->_, __pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 5481, __pyx_L1_error) __pyx_v_block_index = __pyx_t_2;
+5482: return None if block_index < 0 else block_index
__Pyx_XDECREF(__pyx_r); if (((__pyx_v_block_index < 0) != 0)) { __Pyx_INCREF(Py_None); __pyx_t_3 = Py_None; } else { __pyx_t_4 = PyInt_FromSsize_t(__pyx_v_block_index); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 5482, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = __pyx_t_4; __pyx_t_4 = 0; } __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
5483:
+5484: def _block_index_start(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_77_block_index_start(PyObject *__pyx_v_self, PyObject *__pyx_v_address); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_76_block_index_start[] = "Memory._block_index_start(self: u'Memory', address: Address) -> BlockIndex\nLocates the first block inside of an address range.\n\n Returns the index of the first block whose start address is greater than\n or equal to `address`.\n\n Useful to find the initial block index in a ranged search.\n\n Arguments:\n address (int):\n Inclusive start address of the scanned range.\n\n Returns:\n int: First block index since `address`.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 2 | 2 | 2 | 2 | 3 |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> [memory._block_index_start(i) for i in range(12)]\n [0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 2, 3]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_77_block_index_start(PyObject *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_block_index_start (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_76_block_index_start(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_address)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_76_block_index_start(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_block_index_start", 0); /* … */ /* function exit code */ __pyx_L1_error:; 
__Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory._block_index_start", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5485: self: 'Memory',
5486: address: Address,
5487: ) -> BlockIndex:
5488: r"""Locates the first block inside of an address range.
5489:
5490: Returns the index of the first block whose start address is greater than
5491: or equal to `address`.
5492:
5493: Useful to find the initial block index in a ranged search.
5494:
5495: Arguments:
5496: address (int):
5497: Inclusive start address of the scanned range.
5498:
5499: Returns:
5500: int: First block index since `address`.
5501:
5502: Example:
5503: +---+---+---+---+---+---+---+---+---+---+---+---+
5504: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
5505: +===+===+===+===+===+===+===+===+===+===+===+===+
5506: | |[A | B | C | D]| |[$]| |[x | y | z]| |
5507: +---+---+---+---+---+---+---+---+---+---+---+---+
5508: | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 2 | 2 | 2 | 2 | 3 |
5509: +---+---+---+---+---+---+---+---+---+---+---+---+
5510:
5511: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
5512: >>> [memory._block_index_start(i) for i in range(12)]
5513: [0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 2, 3]
5514: """
5515:
+5516: return Rack_IndexStart(self._, address)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5516, __pyx_L1_error) __pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_self->_, __pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 5516, __pyx_L1_error) __pyx_t_3 = PyInt_FromSsize_t(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 5516, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
5517:
+5518: def _block_index_endex(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_79_block_index_endex(PyObject *__pyx_v_self, PyObject *__pyx_v_address); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_78_block_index_endex[] = "Memory._block_index_endex(self: u'Memory', address: Address) -> BlockIndex\nLocates the first block after an address range.\n\n Returns the index of the first block whose end address is lesser than or\n equal to `address`.\n\n Useful to find the termination block index in a ranged search.\n\n Arguments:\n address (int):\n Exclusive end address of the scanned range.\n\n Returns:\n int: First block index after `address`.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 1 | 1 | 1 | 1 | 2 | 2 | 3 | 3 | 3 | 3 |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> [memory._block_index_endex(i) for i in range(12)]\n [0, 1, 1, 1, 1, 1, 2, 2, 3, 3, 3, 3]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_79_block_index_endex(PyObject *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_block_index_endex (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_78_block_index_endex(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_address)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_78_block_index_endex(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_block_index_endex", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); 
__Pyx_AddTraceback("bytesparse._c.Memory._block_index_endex", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5519: self: 'Memory',
5520: address: Address,
5521: ) -> BlockIndex:
5522: r"""Locates the first block after an address range.
5523:
5524: Returns the index of the first block whose end address is lesser than or
5525: equal to `address`.
5526:
5527: Useful to find the termination block index in a ranged search.
5528:
5529: Arguments:
5530: address (int):
5531: Exclusive end address of the scanned range.
5532:
5533: Returns:
5534: int: First block index after `address`.
5535:
5536: Example:
5537: +---+---+---+---+---+---+---+---+---+---+---+---+
5538: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
5539: +===+===+===+===+===+===+===+===+===+===+===+===+
5540: | |[A | B | C | D]| |[$]| |[x | y | z]| |
5541: +---+---+---+---+---+---+---+---+---+---+---+---+
5542: | 0 | 1 | 1 | 1 | 1 | 1 | 2 | 2 | 3 | 3 | 3 | 3 |
5543: +---+---+---+---+---+---+---+---+---+---+---+---+
5544:
5545: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
5546: >>> [memory._block_index_endex(i) for i in range(12)]
5547: [0, 1, 1, 1, 1, 1, 2, 2, 3, 3, 3, 3]
5548: """
5549:
+5550: return Rack_IndexEndex(self._, address)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5550, __pyx_L1_error) __pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_self->_, __pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 5550, __pyx_L1_error) __pyx_t_3 = PyInt_FromSsize_t(__pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 5550, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
5551:
+5552: cdef int peek_(self, addr_t address) except -2:
static int __pyx_f_10bytesparse_2_c_6Memory_peek_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address) { addr_t __pyx_v_address_; Py_ssize_t __pyx_v_block_index; Block_ const *__pyx_v_block; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("peek_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.peek_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5553: cdef:
+5554: addr_t address_ = address
__pyx_v_address_ = __pyx_v_address;
5555: ssize_t block_index
5556: const Block_* block
5557:
+5558: block_index = Rack_IndexAt(self._, address_)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_IndexAt(__pyx_v_self->_, __pyx_v_address_); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 5558, __pyx_L1_error)
__pyx_v_block_index = __pyx_t_1;
+5559: if block_index < 0:
__pyx_t_2 = ((__pyx_v_block_index < 0) != 0); if (__pyx_t_2) { /* … */ }
+5560: return -1
__pyx_r = -1; goto __pyx_L0;
5561: else:
+5562: block = Rack_Get__(self._, <size_t>block_index)
/*else*/ { __pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_self->_, ((size_t)__pyx_v_block_index));
+5563: return Block_Get__(block, address_ - Block_Start(block))
__pyx_r = __pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_block, (__pyx_v_address_ - __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block))); goto __pyx_L0; }
5564:
+5565: def peek(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_81peek(PyObject *__pyx_v_self, PyObject *__pyx_v_address); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_80peek[] = "Memory.peek(self: u'Memory', address: Address) -> Optional[Value]\nGets the item at an address.\n\n Returns:\n int: The item at `address`, ``None`` if empty.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> memory.peek(3) # -> ord('C') = 67\n 67\n >>> memory.peek(6) # -> ord('$') = 36\n 36\n >>> memory.peek(10) # -> ord('z') = 122\n 122\n >>> memory.peek(0)\n None\n >>> memory.peek(7)\n None\n >>> memory.peek(11)\n None\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_81peek(PyObject *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("peek (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_80peek(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_address)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_80peek(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address) { int __pyx_v_value; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("peek", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Memory.peek", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5566: self: 'Memory',
5567: address: Address,
5568: ) -> Optional[Value]:
5569: r"""Gets the item at an address.
5570:
5571: Returns:
5572: int: The item at `address`, ``None`` if empty.
5573:
5574: Examples:
5575: +---+---+---+---+---+---+---+---+---+---+---+---+
5576: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
5577: +===+===+===+===+===+===+===+===+===+===+===+===+
5578: | |[A | B | C | D]| |[$]| |[x | y | z]| |
5579: +---+---+---+---+---+---+---+---+---+---+---+---+
5580:
5581: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
5582: >>> memory.peek(3) # -> ord('C') = 67
5583: 67
5584: >>> memory.peek(6) # -> ord('$') = 36
5585: 36
5586: >>> memory.peek(10) # -> ord('z') = 122
5587: 122
5588: >>> memory.peek(0)
5589: None
5590: >>> memory.peek(7)
5591: None
5592: >>> memory.peek(11)
5593: None
5594: """
5595: cdef:
5596: int value
5597:
+5598: value = self.peek_(<addr_t>address)
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5598, __pyx_L1_error) __pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->peek_(__pyx_v_self, ((addr_t)__pyx_t_1)); if (unlikely(__pyx_t_2 == ((int)-2))) __PYX_ERR(0, 5598, __pyx_L1_error) __pyx_v_value = __pyx_t_2;
+5599: return None if value < 0 else value
__Pyx_XDECREF(__pyx_r); if (((__pyx_v_value < 0) != 0)) { __Pyx_INCREF(Py_None); __pyx_t_3 = Py_None; } else { __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_value); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 5599, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = __pyx_t_4; __pyx_t_4 = 0; } __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;
5600:
+5601: cdef int poke_none_(self, addr_t address) except -2:
static int __pyx_f_10bytesparse_2_c_6Memory_poke_none_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address) { int __pyx_v_value; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("poke_none_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.poke_none_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5602: cdef:
5603: int value
5604:
5605: # Standard clear method
+5606: value = self.peek_(address)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->peek_(__pyx_v_self, __pyx_v_address); if (unlikely(__pyx_t_1 == ((int)-2))) __PYX_ERR(0, 5606, __pyx_L1_error)
__pyx_v_value = __pyx_t_1;
+5607: self._erase_(address, address + 1, False, False) # clear
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_address, (__pyx_v_address + 1), 0, 0); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 5607, __pyx_L1_error)
+5608: return value
__pyx_r = __pyx_v_value; goto __pyx_L0;
5609:
+5610: cdef vint poke_none__(self, addr_t address) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_poke_none__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("poke_none__", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.poke_none__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5611: # Standard clear method
+5612: self._erase_(address, address + 1, False, False) # clear
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_address, (__pyx_v_address + 1), 0, 0); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 5612, __pyx_L1_error)
5613:
+5614: cdef int poke_(self, addr_t address, byte_t item) except -2:
static int __pyx_f_10bytesparse_2_c_6Memory_poke_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address, byte_t __pyx_v_item) { Rack_ *__pyx_v_blocks; size_t __pyx_v_block_count; size_t __pyx_v_block_index; Block_ *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; Block_ *__pyx_v_block2; addr_t __pyx_v_block_start2; int __pyx_v_value; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("poke_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.poke_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -2; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5615: cdef:
+5616: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
+5617: size_t block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
5618: size_t block_index
5619: Block_* block
5620: addr_t block_start
5621: addr_t block_endex
5622: Block_* block2
5623: addr_t block_start2
5624: int value
5625:
+5626: block_index = Rack_IndexEndex(blocks, address) - 1
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_blocks, __pyx_v_address); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 5626, __pyx_L1_error)
__pyx_v_block_index = (__pyx_t_2 - 1);
5627:
+5628: if block_index < block_count:
__pyx_t_3 = ((__pyx_v_block_index < __pyx_v_block_count) != 0); if (__pyx_t_3) { /* … */ }
+5629: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+5630: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+5631: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
5632:
+5633: if block_start <= address < block_endex:
__pyx_t_3 = (__pyx_v_block_start <= __pyx_v_address); if (__pyx_t_3) { __pyx_t_3 = (__pyx_v_address < __pyx_v_block_endex); } __pyx_t_4 = (__pyx_t_3 != 0); if (__pyx_t_4) { /* … */ }
5634: # Address within existing block, update directly
+5635: address -= block_start
__pyx_v_address = (__pyx_v_address - __pyx_v_block_start);
+5636: value = Block_Get__(block, <size_t>address)
__pyx_v_value = __pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_block, ((size_t)__pyx_v_address));
+5637: Block_Set__(block, <size_t>address, item)
(void)(__pyx_f_10bytesparse_2_c_Block_Set__(__pyx_v_block, ((size_t)__pyx_v_address), __pyx_v_item));
+5638: return value
__pyx_r = __pyx_v_value; goto __pyx_L0;
5639:
+5640: elif address == block_endex:
__pyx_t_4 = ((__pyx_v_address == __pyx_v_block_endex) != 0); if (__pyx_t_4) { /* … */ }
5641: # Address just after the end of the block, append
+5642: block = Block_Append(block, item)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Append(__pyx_v_block, __pyx_v_item); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 5642, __pyx_L1_error)
__pyx_v_block = __pyx_t_5;
+5643: Rack_Set__(blocks, block_index, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block));
5644:
+5645: block_index += 1
__pyx_v_block_index = (__pyx_v_block_index + 1);
+5646: if block_index < block_count:
__pyx_t_4 = ((__pyx_v_block_index < __pyx_v_block_count) != 0); if (__pyx_t_4) { /* … */ }
+5647: block2 = Rack_Get__(blocks, block_index)
__pyx_v_block2 = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+5648: block_start2 = Block_Start(block2)
__pyx_v_block_start2 = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block2);
5649:
+5650: if block_endex + 1 == block_start2:
__pyx_t_4 = (((__pyx_v_block_endex + 1) == __pyx_v_block_start2) != 0); if (__pyx_t_4) { /* … */ }
5651: # Merge with the following contiguous block
+5652: block = Block_Extend(block, block2)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_Extend(__pyx_v_block, __pyx_v_block2); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 5652, __pyx_L1_error)
__pyx_v_block = __pyx_t_5;
+5653: Rack_Set__(blocks, block_index - 1, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, (__pyx_v_block_index - 1), __pyx_v_block));
+5654: self._ = blocks = Rack_Pop_(blocks, block_index, NULL)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Pop_(__pyx_v_blocks, __pyx_v_block_index, NULL); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 5654, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_1;
__pyx_v_blocks = __pyx_t_1;
+5655: return -1
__pyx_r = -1; goto __pyx_L0;
5656:
5657: else:
+5658: block_index += 1
/*else*/ { __pyx_v_block_index = (__pyx_v_block_index + 1);
+5659: if block_index < block_count:
__pyx_t_4 = ((__pyx_v_block_index < __pyx_v_block_count) != 0); if (__pyx_t_4) { /* … */ } }
+5660: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+5661: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
5662:
+5663: if address + 1 == block_start:
__pyx_t_4 = (((__pyx_v_address + 1) == __pyx_v_block_start) != 0); if (__pyx_t_4) { /* … */ }
5664: # Prepend to the next block
+5665: block = Block_AppendLeft(block, item)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_AppendLeft(__pyx_v_block, __pyx_v_item); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 5665, __pyx_L1_error)
__pyx_v_block = __pyx_t_5;
+5666: Rack_Set__(blocks, block_index, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block));
+5667: block.address -= 1 # update address
__pyx_v_block->address = (__pyx_v_block->address - 1);
+5668: return -1
__pyx_r = -1; goto __pyx_L0;
5669:
5670: # There is no faster way than the standard block writing method
+5671: self._erase_(address, address + 1, False, True) # insert
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_address, (__pyx_v_address + 1), 0, 1); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 5671, __pyx_L1_error)
+5672: self._insert_(address, 1, &item, False)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_insert_(__pyx_v_self, __pyx_v_address, 1, (&__pyx_v_item), 0); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 5672, __pyx_L1_error)
5673:
+5674: self._crop_(self._trim_start, self._trim_endex, None)
__pyx_t_6 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_self->_trim_start, __pyx_v_self->_trim_endex, ((PyObject*)Py_None)); if (unlikely(__pyx_t_6 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5674, __pyx_L1_error)
+5675: return -1
__pyx_r = -1; goto __pyx_L0;
5676:
+5677: def poke(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_83poke(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_82poke[] = "Memory.poke(self: u'Memory', address: Address, item: Optional[Union[AnyBytes, Value]]) -> Optional[Value]\nSets the item at an address.\n\n Arguments:\n address (int):\n Address of the target item.\n\n item (int or byte):\n Item to set, ``None`` to clear the cell.\n\n Returns:\n int: The previous item at `address`, ``None`` if empty.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> memory.poke(3, b'@') # -> ord('C') = 67\n 67\n >>> memory.peek(3) # -> ord('@') = 64\n 64\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> memory.poke(5, '@')\n None\n >>> memory.peek(5) # -> ord('@') = 64\n 64\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_83poke(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_address = 0; PyObject *__pyx_v_item = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("poke (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_address,&__pyx_n_s_item,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, 
__pyx_n_s_address)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("poke", 1, 2, 2, 1); __PYX_ERR(0, 5677, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "poke") < 0)) __PYX_ERR(0, 5677, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_address = values[0]; __pyx_v_item = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("poke", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 5677, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.poke", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_82poke(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_address, __pyx_v_item); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_82poke(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address, PyObject *__pyx_v_item) { addr_t __pyx_v_address_; int __pyx_v_value; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("poke", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("bytesparse._c.Memory.poke", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5678: self: 'Memory',
5679: address: Address,
5680: item: Optional[Union[AnyBytes, Value]],
5681: ) -> Optional[Value]:
5682: r"""Sets the item at an address.
5683:
5684: Arguments:
5685: address (int):
5686: Address of the target item.
5687:
5688: item (int or byte):
5689: Item to set, ``None`` to clear the cell.
5690:
5691: Returns:
5692: int: The previous item at `address`, ``None`` if empty.
5693:
5694: Examples:
5695: +---+---+---+---+---+---+---+---+---+---+---+---+
5696: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
5697: +===+===+===+===+===+===+===+===+===+===+===+===+
5698: | |[A | B | C | D]| |[$]| |[x | y | z]| |
5699: +---+---+---+---+---+---+---+---+---+---+---+---+
5700:
5701: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
5702: >>> memory.poke(3, b'@') # -> ord('C') = 67
5703: 67
5704: >>> memory.peek(3) # -> ord('@') = 64
5705: 64
5706: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
5707: >>> memory.poke(5, '@')
5708: None
5709: >>> memory.peek(5) # -> ord('@') = 64
5710: 64
5711: """
5712: cdef:
+5713: addr_t address_ = <addr_t>address
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5713, __pyx_L1_error) __pyx_v_address_ = ((addr_t)__pyx_t_1);
5714: int value
5715:
+5716: if item is None:
__pyx_t_2 = (__pyx_v_item == Py_None); __pyx_t_3 = (__pyx_t_2 != 0); if (__pyx_t_3) { /* … */ goto __pyx_L3; }
+5717: value = self.poke_none_(address_)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_none_(__pyx_v_self, __pyx_v_address_); if (unlikely(__pyx_t_4 == ((int)-2))) __PYX_ERR(0, 5717, __pyx_L1_error)
__pyx_v_value = __pyx_t_4;
5718: else:
+5719: if isinstance(item, int):
/*else*/ {
__pyx_t_3 = PyInt_Check(__pyx_v_item);
__pyx_t_2 = (__pyx_t_3 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L4;
}
+5720: value = self.poke_(address_, <byte_t>item)
__pyx_t_5 = __Pyx_PyInt_As_byte_t(__pyx_v_item); if (unlikely((__pyx_t_5 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5720, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_(__pyx_v_self, __pyx_v_address_, ((byte_t)__pyx_t_5)); if (unlikely(__pyx_t_4 == ((int)-2))) __PYX_ERR(0, 5720, __pyx_L1_error) __pyx_v_value = __pyx_t_4;
5721: else:
+5722: if len(item) != 1:
/*else*/ { __pyx_t_6 = PyObject_Length(__pyx_v_item); if (unlikely(__pyx_t_6 == ((Py_ssize_t)-1))) __PYX_ERR(0, 5722, __pyx_L1_error) __pyx_t_2 = ((__pyx_t_6 != 1) != 0); if (unlikely(__pyx_t_2)) { /* … */ }
+5723: raise ValueError('expecting single item')
__pyx_t_7 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__21, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 5723, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_Raise(__pyx_t_7, 0, 0, 0); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __PYX_ERR(0, 5723, __pyx_L1_error)
+5724: value = self.poke_(address_, <byte_t>item[0])
__pyx_t_7 = __Pyx_GetItemInt(__pyx_v_item, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 5724, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_5 = __Pyx_PyInt_As_byte_t(__pyx_t_7); if (unlikely((__pyx_t_5 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5724, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_(__pyx_v_self, __pyx_v_address_, ((byte_t)__pyx_t_5)); if (unlikely(__pyx_t_4 == ((int)-2))) __PYX_ERR(0, 5724, __pyx_L1_error) __pyx_v_value = __pyx_t_4; } __pyx_L4:; } __pyx_L3:;
5725:
+5726: return None if value < 0 else value
__Pyx_XDECREF(__pyx_r); if (((__pyx_v_value < 0) != 0)) { __Pyx_INCREF(Py_None); __pyx_t_7 = Py_None; } else { __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_value); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 5726, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_t_7 = __pyx_t_8; __pyx_t_8 = 0; } __pyx_r = __pyx_t_7; __pyx_t_7 = 0; goto __pyx_L0;
5727:
+5728: cdef Memory extract_(self, addr_t start, addr_t endex,
static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_f_10bytesparse_2_c_6Memory_extract_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start, addr_t __pyx_v_endex, size_t __pyx_v_pattern_size, byte_t const *__pyx_v_pattern_ptr, saddr_t __pyx_v_step, int __pyx_v_bound) { Rack_ const *__pyx_v_blocks1; size_t __pyx_v_block_count; size_t __pyx_v_block_index; size_t __pyx_v_block_index_start; size_t __pyx_v_block_index_endex; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_memory = 0; Rack_ *__pyx_v_blocks2; Block_ *__pyx_v_block2; addr_t __pyx_v_offset; Block_ *__pyx_v_pattern; int __pyx_v_value; CYTHON_UNUSED saddr_t __pyx_v_skip; Block_ *__pyx_v_block1; PyObject *__pyx_v_pattern_obj = NULL; struct __pyx_obj_10bytesparse_2_c_Rover *__pyx_v_rover = NULL; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extract_", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_13); __Pyx_XDECREF(__pyx_t_14); __Pyx_XDECREF(((PyObject *)__pyx_t_16)); __Pyx_AddTraceback("bytesparse._c.Memory.extract_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_memory); __Pyx_XDECREF(__pyx_v_pattern_obj); __Pyx_XDECREF((PyObject *)__pyx_v_rover); __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5729: size_t pattern_size, const byte_t* pattern_ptr,
5730: saddr_t step, bint bound):
5731: cdef:
+5732: const Rack_* blocks1 = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks1 = __pyx_t_1;
+5733: size_t block_count = Rack_Length(blocks1)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks1);
5734: size_t block_index
5735: size_t block_index_start
5736: size_t block_index_endex
5737: Memory memory
5738: Rack_* blocks2
5739: Block_* block2
5740: addr_t offset
+5741: Block_* pattern = NULL
__pyx_v_pattern = NULL;
5742: int value
5743: saddr_t skip
5744:
+5745: memory = Memory()
__pyx_t_2 = __Pyx_PyObject_CallNoArg(((PyObject *)__pyx_ptype_10bytesparse_2_c_Memory)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5745, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_memory = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_2); __pyx_t_2 = 0;
5746:
+5747: if step == 1:
__pyx_t_3 = ((__pyx_v_step == 1) != 0); if (__pyx_t_3) { /* … */ goto __pyx_L3; }
+5748: if start < endex and block_count:
__pyx_t_4 = ((__pyx_v_start < __pyx_v_endex) != 0); if (__pyx_t_4) { } else { __pyx_t_3 = __pyx_t_4; goto __pyx_L5_bool_binop_done; } __pyx_t_4 = (__pyx_v_block_count != 0); __pyx_t_3 = __pyx_t_4; __pyx_L5_bool_binop_done:; if (__pyx_t_3) { /* … */ goto __pyx_L4; }
+5749: block_index_start = Rack_IndexStart(blocks1, start)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks1, __pyx_v_start); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 5749, __pyx_L1_error)
__pyx_v_block_index_start = __pyx_t_5;
+5750: block_index_endex = Rack_IndexEndex(blocks1, endex)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_blocks1, __pyx_v_endex); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 5750, __pyx_L1_error)
__pyx_v_block_index_endex = __pyx_t_5;
5751: else:
+5752: block_index_start = 0
/*else*/ { __pyx_v_block_index_start = 0;
+5753: block_index_endex = 0
__pyx_v_block_index_endex = 0; } __pyx_L4:;
5754:
5755: # Reserve slots to clone blocks
+5756: blocks2 = memory._
__pyx_t_1 = __pyx_v_memory->_; __pyx_v_blocks2 = __pyx_t_1;
+5757: block_count = block_index_endex - block_index_start
__pyx_v_block_count = (__pyx_v_block_index_endex - __pyx_v_block_index_start);
+5758: memory._ = blocks2 = Rack_Reserve_(blocks2, 0, block_count)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Reserve_(__pyx_v_blocks2, 0, __pyx_v_block_count); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 5758, __pyx_L1_error)
__pyx_v_memory->_ = __pyx_t_1;
__pyx_v_blocks2 = __pyx_t_1;
+5759: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; goto __pyx_L12_try_end; __pyx_L7_error:; __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_6); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_8); __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); goto __pyx_L1_error; __pyx_L12_try_end:; }
5760: # Clone blocks into the new memory
+5761: for block_index in range(block_count):
__pyx_t_9 = __pyx_v_block_count; __pyx_t_10 = __pyx_t_9; for (__pyx_t_11 = 0; __pyx_t_11 < __pyx_t_10; __pyx_t_11+=1) { __pyx_v_block_index = __pyx_t_11;
+5762: block1 = Rack_Get__(blocks1, block_index_start + block_index)
__pyx_v_block1 = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks1, (__pyx_v_block_index_start + __pyx_v_block_index));
+5763: block2 = Block_Copy(block1)
__pyx_t_12 = __pyx_f_10bytesparse_2_c_Block_Copy(__pyx_v_block1); if (unlikely(__pyx_t_12 == ((Block_ *)NULL))) __PYX_ERR(0, 5763, __pyx_L7_error)
__pyx_v_block2 = __pyx_t_12;
+5764: Rack_Set__(blocks2, block_index, block2)
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks2, __pyx_v_block_index, __pyx_v_block2)); }
+5765: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory.extract_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_13, &__pyx_t_14) < 0) __PYX_ERR(0, 5765, __pyx_L9_except_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_GOTREF(__pyx_t_13); __Pyx_GOTREF(__pyx_t_14);
+5766: memory._ = blocks2 = Rack_Clear(blocks2) # orphan
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Clear(__pyx_v_blocks2); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 5766, __pyx_L9_except_error)
__pyx_v_memory->_ = __pyx_t_1;
__pyx_v_blocks2 = __pyx_t_1;
+5767: raise
__Pyx_GIVEREF(__pyx_t_2); __Pyx_GIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_ErrRestoreWithState(__pyx_t_2, __pyx_t_13, __pyx_t_14); __pyx_t_2 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __PYX_ERR(0, 5767, __pyx_L9_except_error) } __pyx_L9_except_error:;
5768:
5769: # Trim data in excess
+5770: memory._crop_(start, endex, None)
__pyx_t_15 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_memory->__pyx_vtab)->_crop_(__pyx_v_memory, __pyx_v_start, __pyx_v_endex, ((PyObject*)Py_None)); if (unlikely(__pyx_t_15 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5770, __pyx_L1_error)
5771:
+5772: if pattern_size and pattern_ptr:
__pyx_t_4 = (__pyx_v_pattern_size != 0); if (__pyx_t_4) { } else { __pyx_t_3 = __pyx_t_4; goto __pyx_L18_bool_binop_done; } __pyx_t_4 = (__pyx_v_pattern_ptr != 0); __pyx_t_3 = __pyx_t_4; __pyx_L18_bool_binop_done:; if (__pyx_t_3) { /* … */ }
+5773: pattern = Block_Create(0, pattern_size, pattern_ptr)
__pyx_t_12 = __pyx_f_10bytesparse_2_c_Block_Create(0, __pyx_v_pattern_size, __pyx_v_pattern_ptr); if (unlikely(__pyx_t_12 == ((Block_ *)NULL))) __PYX_ERR(0, 5773, __pyx_L1_error)
__pyx_v_pattern = __pyx_t_12;
+5774: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; goto __pyx_L25_try_end; __pyx_L20_error:; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_8); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_7, __pyx_t_6); goto __pyx_L1_error; __pyx_L25_try_end:; }
+5775: memory.flood_(start, endex, &pattern, None)
__pyx_t_15 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_memory->__pyx_vtab)->flood_(__pyx_v_memory, __pyx_v_start, __pyx_v_endex, (&__pyx_v_pattern), ((PyObject*)Py_None)); if (unlikely(__pyx_t_15 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5775, __pyx_L20_error)
+5776: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory.extract_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_14, &__pyx_t_13, &__pyx_t_2) < 0) __PYX_ERR(0, 5776, __pyx_L22_except_error) __Pyx_GOTREF(__pyx_t_14); __Pyx_GOTREF(__pyx_t_13); __Pyx_GOTREF(__pyx_t_2);
+5777: Block_Free(pattern) # orphan
(void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_pattern));
+5778: raise
__Pyx_GIVEREF(__pyx_t_14); __Pyx_GIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_2); __Pyx_ErrRestoreWithState(__pyx_t_14, __pyx_t_13, __pyx_t_2); __pyx_t_14 = 0; __pyx_t_13 = 0; __pyx_t_2 = 0; __PYX_ERR(0, 5778, __pyx_L22_except_error) } __pyx_L22_except_error:;
5779: else:
+5780: if step > 1:
/*else*/ { __pyx_t_3 = ((__pyx_v_step > 1) != 0); if (__pyx_t_3) { /* … */ } } __pyx_L3:;
+5781: block2 = NULL
__pyx_v_block2 = NULL;
+5782: offset = start
__pyx_v_offset = __pyx_v_start;
+5783: pattern_obj = <const byte_t[:pattern_size]>pattern_ptr if pattern_ptr else None
if ((__pyx_v_pattern_ptr != 0)) { if (!__pyx_v_pattern_ptr) { PyErr_SetString(PyExc_ValueError,"Cannot create cython.array from NULL pointer"); __PYX_ERR(0, 5783, __pyx_L1_error) } __pyx_t_14 = __pyx_format_from_typeinfo(&__Pyx_TypeInfo_nn_byte_t__const__); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 5783, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_14); __pyx_t_13 = Py_BuildValue((char*) "(" __PYX_BUILD_PY_SSIZE_T ")", ((Py_ssize_t)__pyx_v_pattern_size)); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 5783, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_13); __pyx_t_16 = __pyx_array_new(__pyx_t_13, sizeof(byte_t const ), PyBytes_AS_STRING(__pyx_t_14), (char *) "c", (char *) __pyx_v_pattern_ptr); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 5783, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_16); __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; __pyx_t_2 = ((PyObject *)__pyx_t_16); __pyx_t_16 = 0; } else { __Pyx_INCREF(Py_None); __pyx_t_2 = Py_None; } __pyx_v_pattern_obj = __pyx_t_2; __pyx_t_2 = 0;
+5784: rover = Rover(self, start, endex, pattern_obj, True, False)
__pyx_t_2 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_start); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5784, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_14 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_endex); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 5784, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_14); __pyx_t_13 = PyTuple_New(6); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 5784, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_13); __Pyx_INCREF(((PyObject *)__pyx_v_self)); __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); PyTuple_SET_ITEM(__pyx_t_13, 0, ((PyObject *)__pyx_v_self)); __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_13, 1, __pyx_t_2); __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_13, 2, __pyx_t_14); __Pyx_INCREF(__pyx_v_pattern_obj); __Pyx_GIVEREF(__pyx_v_pattern_obj); PyTuple_SET_ITEM(__pyx_t_13, 3, __pyx_v_pattern_obj); __Pyx_INCREF(Py_True); __Pyx_GIVEREF(Py_True); PyTuple_SET_ITEM(__pyx_t_13, 4, Py_True); __Pyx_INCREF(Py_False); __Pyx_GIVEREF(Py_False); PyTuple_SET_ITEM(__pyx_t_13, 5, Py_False); __pyx_t_2 = 0; __pyx_t_14 = 0; __pyx_t_14 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_10bytesparse_2_c_Rover), __pyx_t_13, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 5784, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_14); __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __pyx_v_rover = ((struct __pyx_obj_10bytesparse_2_c_Rover *)__pyx_t_14); __pyx_t_14 = 0;
+5785: try:
/*try:*/ { { /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; goto __pyx_L37_try_end; __pyx_L32_error:; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(((PyObject *)__pyx_t_16)); __pyx_t_16 = 0; __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_6); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_8); __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); goto __pyx_L30_error; __pyx_L33_exception_handled:; __Pyx_XGIVEREF(__pyx_t_6); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_8); __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); __pyx_L37_try_end:; } }
+5786: while True:
while (1) {
+5787: value = rover.next_()
__pyx_t_17 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Rover *)__pyx_v_rover->__pyx_vtab)->next_(__pyx_v_rover); if (unlikely(__pyx_t_17 == ((int)-2))) __PYX_ERR(0, 5787, __pyx_L32_error)
__pyx_v_value = __pyx_t_17;
+5788: if value < 0:
__pyx_t_3 = ((__pyx_v_value < 0) != 0); if (__pyx_t_3) { /* … */ goto __pyx_L40; }
+5789: if block2:
__pyx_t_3 = (__pyx_v_block2 != 0); if (__pyx_t_3) { /* … */ }
+5790: memory._ = Rack_Append(memory._, block2)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Append(__pyx_v_memory->_, __pyx_v_block2); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 5790, __pyx_L32_error)
__pyx_v_memory->_ = __pyx_t_1;
+5791: block2 = NULL
__pyx_v_block2 = NULL;
5792: else:
+5793: if not block2:
/*else*/ { __pyx_t_3 = ((!(__pyx_v_block2 != 0)) != 0); if (__pyx_t_3) { /* … */ }
+5794: block2 = Block_Alloc(offset, 0, False)
__pyx_t_12 = __pyx_f_10bytesparse_2_c_Block_Alloc(__pyx_v_offset, 0, 0); if (unlikely(__pyx_t_12 == ((Block_ *)NULL))) __PYX_ERR(0, 5794, __pyx_L32_error)
__pyx_v_block2 = __pyx_t_12;
+5795: block2 = Block_Append(block2, <byte_t>value)
__pyx_t_12 = __pyx_f_10bytesparse_2_c_Block_Append(__pyx_v_block2, ((byte_t)__pyx_v_value)); if (unlikely(__pyx_t_12 == ((Block_ *)NULL))) __PYX_ERR(0, 5795, __pyx_L32_error)
__pyx_v_block2 = __pyx_t_12;
}
__pyx_L40:;
5796:
+5797: offset += 1
__pyx_v_offset = (__pyx_v_offset + 1);
+5798: for skip in range(step - 1):
__pyx_t_18 = (__pyx_v_step - 1); __pyx_t_19 = __pyx_t_18; for (__pyx_t_20 = 0; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { __pyx_v_skip = __pyx_t_20;
+5799: rover.next_()
__pyx_t_17 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Rover *)__pyx_v_rover->__pyx_vtab)->next_(__pyx_v_rover); if (unlikely(__pyx_t_17 == ((int)-2))) __PYX_ERR(0, 5799, __pyx_L32_error)
}
}
+5800: except StopIteration:
__pyx_t_17 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_StopIteration); if (__pyx_t_17) { __Pyx_AddTraceback("bytesparse._c.Memory.extract_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_14, &__pyx_t_13, &__pyx_t_2) < 0) __PYX_ERR(0, 5800, __pyx_L34_except_error) __Pyx_GOTREF(__pyx_t_14); __Pyx_GOTREF(__pyx_t_13); __Pyx_GOTREF(__pyx_t_2);
+5801: if block2:
__pyx_t_3 = (__pyx_v_block2 != 0); if (__pyx_t_3) { /* … */ } __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; goto __pyx_L33_exception_handled; } goto __pyx_L34_except_error; __pyx_L34_except_error:;
+5802: memory._ = Rack_Append(memory._, block2)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Append(__pyx_v_memory->_, __pyx_v_block2); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 5802, __pyx_L34_except_error)
__pyx_v_memory->_ = __pyx_t_1;
+5803: block2 = NULL
__pyx_v_block2 = NULL;
5804: finally:
+5805: block2 = Block_Free(block2) # orphan
/*finally:*/ { /*normal exit:*/{ __pyx_v_block2 = __pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block2); goto __pyx_L31; } __pyx_L30_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_8 = 0; __pyx_t_7 = 0; __pyx_t_6 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(((PyObject *)__pyx_t_16)); __pyx_t_16 = 0; __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_23, &__pyx_t_24, &__pyx_t_25); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_8, &__pyx_t_7, &__pyx_t_6) < 0)) __Pyx_ErrFetch(&__pyx_t_8, &__pyx_t_7, &__pyx_t_6); __Pyx_XGOTREF(__pyx_t_8); __Pyx_XGOTREF(__pyx_t_7); __Pyx_XGOTREF(__pyx_t_6); __Pyx_XGOTREF(__pyx_t_23); __Pyx_XGOTREF(__pyx_t_24); __Pyx_XGOTREF(__pyx_t_25); __pyx_t_17 = __pyx_lineno; __pyx_t_21 = __pyx_clineno; __pyx_t_22 = __pyx_filename; { __pyx_v_block2 = __pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block2); } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_23); __Pyx_XGIVEREF(__pyx_t_24); __Pyx_XGIVEREF(__pyx_t_25); __Pyx_ExceptionReset(__pyx_t_23, __pyx_t_24, __pyx_t_25); } __Pyx_XGIVEREF(__pyx_t_8); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_6); __Pyx_ErrRestore(__pyx_t_8, __pyx_t_7, __pyx_t_6); __pyx_t_8 = 0; __pyx_t_7 = 0; __pyx_t_6 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_lineno = __pyx_t_17; __pyx_clineno = __pyx_t_21; __pyx_filename = __pyx_t_22; goto __pyx_L1_error; } __pyx_L31:; }
5806:
+5807: if bound:
__pyx_t_3 = (__pyx_v_bound != 0); if (__pyx_t_3) { /* … */ }
+5808: endex = offset
__pyx_v_endex = __pyx_v_offset;
+5809: if bound:
__pyx_t_3 = (__pyx_v_bound != 0); if (__pyx_t_3) { /* … */ }
+5810: memory._trim_start_ = True
__pyx_v_memory->_trim_start_ = 1;
+5811: memory._trim_endex_ = True
__pyx_v_memory->_trim_endex_ = 1;
+5812: memory._trim_start = start
__pyx_v_memory->_trim_start = __pyx_v_start;
+5813: memory._trim_endex = endex
__pyx_v_memory->_trim_endex = __pyx_v_endex;
5814:
+5815: return memory
__Pyx_XDECREF(((PyObject *)__pyx_r)); __Pyx_INCREF(((PyObject *)__pyx_v_memory)); __pyx_r = __pyx_v_memory; goto __pyx_L0;
5816:
+5817: def extract(
/* Python wrapper */ static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pw_10bytesparse_2_c_6Memory_85extract(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_84extract[] = "Memory.extract(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None, pattern: Optional[Union[AnyBytes, Value]] = None, step: Optional[Address] = None, bound: bool = True) -> u'Memory'\nSelects items from a range.\n\n Arguments:\n start (int):\n Inclusive start of the extracted range.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end of the extracted range.\n If ``None``, :attr:`endex` is considered.\n\n pattern (items):\n Optional pattern of items to fill the emptiness.\n\n step (int):\n Optional address stepping between bytes extracted from the\n range. It has the same meaning of Python's :attr:`slice.step`,\n but negative steps are ignored.\n Please note that a `step` greater than 1 could take much more\n time to process than the default unitary step.\n\n bound (bool):\n The selected address range is applied to the resulting memory\n as its trimming range. 
This retains information about any\n initial and final emptiness of that range, which would be lost\n otherwise.\n\n Returns:\n :obj:`Memory`: A copy of the memory from the selected range.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C | D]| |[$]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])\n >>> memory.extract()._blocks\n [[1, b'ABCD'], [6, b'$'], [8, b'xyz']]\n >>> memory.extract(2, 9)._blocks\n [[2, b'BCD'], [6, b'$']"", [8, b'x']]\n >>> memory.extract(start=2)._blocks\n [[2, b'BCD'], [6, b'$'], [8, b'xyz']]\n >>> memory.extract(endex=9)._blocks\n [[1, b'ABCD'], [6, b'$'], [8, b'x']]\n >>> memory.extract(5, 8).span\n (5, 8)\n >>> memory.extract(pattern='.')._blocks\n [[1, b'ABCD.$.xyz']]\n >>> memory.extract(pattern='.', step=3)._blocks\n [[1, b'AD.z']]\n "; static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pw_10bytesparse_2_c_6Memory_85extract(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_pattern = 0; PyObject *__pyx_v_step = 0; PyObject *__pyx_v_bound = 0; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extract (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_pattern,&__pyx_n_s_step,&__pyx_n_s_bound,0}; PyObject* values[5] = {0,0,0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_pf_10bytesparse_2_c_6Memory_84extract(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_pattern, PyObject *__pyx_v_step, PyObject *__pyx_v_bound) { addr_t __pyx_v_start_; 
addr_t __pyx_v_endex_; __Pyx_memviewslice __pyx_v_pattern_view = { 0, 0, { 0 }, { 0 }, { 0 } }; byte_t __pyx_v_pattern_value; size_t __pyx_v_pattern_size; byte_t const *__pyx_v_pattern_ptr; saddr_t __pyx_v_step_; int __pyx_v_bound_; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extract", 0); /* … */ /* function exit code */ __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_6, 1); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Memory.extract", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_pattern_view, 1); __Pyx_XGIVEREF((PyObject *)__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5818: self: 'Memory',
+5819: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+5820: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+5821: pattern: Optional[Union[AnyBytes, Value]] = None,
values[2] = ((PyObject *)Py_None);
+5822: step: Optional[Address] = None,
values[3] = ((PyObject *)Py_None);
+5823: bound: bool = True,
values[4] = ((PyObject *)Py_True); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pattern); if (value) { values[2] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_step); if (value) { values[3] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 4: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_bound); if (value) { values[4] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "extract") < 0)) __PYX_ERR(0, 5817, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = 
PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_pattern = values[2]; __pyx_v_step = values[3]; __pyx_v_bound = values[4]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("extract", 0, 0, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 5817, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.extract", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_84extract(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_pattern, __pyx_v_step, __pyx_v_bound);
5824: ) -> 'Memory':
5825: r"""Selects items from a range.
5826:
5827: Arguments:
5828: start (int):
5829: Inclusive start of the extracted range.
5830: If ``None``, :attr:`start` is considered.
5831:
5832: endex (int):
5833: Exclusive end of the extracted range.
5834: If ``None``, :attr:`endex` is considered.
5835:
5836: pattern (items):
5837: Optional pattern of items to fill the emptiness.
5838:
5839: step (int):
5840: Optional address stepping between bytes extracted from the
5841: range. It has the same meaning of Python's :attr:`slice.step`,
5842: but negative steps are ignored.
5843: Please note that a `step` greater than 1 could take much more
5844: time to process than the default unitary step.
5845:
5846: bound (bool):
5847: The selected address range is applied to the resulting memory
5848: as its trimming range. This retains information about any
5849: initial and final emptiness of that range, which would be lost
5850: otherwise.
5851:
5852: Returns:
5853: :obj:`Memory`: A copy of the memory from the selected range.
5854:
5855: Examples:
5856: +---+---+---+---+---+---+---+---+---+---+---+---+
5857: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
5858: +===+===+===+===+===+===+===+===+===+===+===+===+
5859: | |[A | B | C | D]| |[$]| |[x | y | z]| |
5860: +---+---+---+---+---+---+---+---+---+---+---+---+
5861:
5862: >>> memory = Memory(blocks=[[1, b'ABCD'], [6, b'$'], [8, b'xyz']])
5863: >>> memory.extract()._blocks
5864: [[1, b'ABCD'], [6, b'$'], [8, b'xyz']]
5865: >>> memory.extract(2, 9)._blocks
5866: [[2, b'BCD'], [6, b'$'], [8, b'x']]
5867: >>> memory.extract(start=2)._blocks
5868: [[2, b'BCD'], [6, b'$'], [8, b'xyz']]
5869: >>> memory.extract(endex=9)._blocks
5870: [[1, b'ABCD'], [6, b'$'], [8, b'x']]
5871: >>> memory.extract(5, 8).span
5872: (5, 8)
5873: >>> memory.extract(pattern='.')._blocks
5874: [[1, b'ABCD.$.xyz']]
5875: >>> memory.extract(pattern='.', step=3)._blocks
5876: [[1, b'AD.z']]
5877: """
5878: cdef:
5879: addr_t start_
5880: addr_t endex_
5881: const byte_t[:] pattern_view
5882: byte_t pattern_value
5883: size_t pattern_size
5884: const byte_t* pattern_ptr
+5885: saddr_t step_ = <saddr_t>1 if step is None else <saddr_t>step
__pyx_t_2 = (__pyx_v_step == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_1 = ((saddr_t)1); } else { __pyx_t_3 = __Pyx_PyInt_As_int_fast64_t(__pyx_v_step); if (unlikely((__pyx_t_3 == ((saddr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5885, __pyx_L1_error) __pyx_t_1 = ((saddr_t)__pyx_t_3); } __pyx_v_step_ = __pyx_t_1;
+5886: bint bound_ = <bint>bound
__pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_bound); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 5886, __pyx_L1_error) __pyx_v_bound_ = __pyx_t_2;
5887:
+5888: if pattern is None:
__pyx_t_2 = (__pyx_v_pattern == Py_None); __pyx_t_4 = (__pyx_t_2 != 0); if (__pyx_t_4) { /* … */ goto __pyx_L3; }
+5889: pattern_size = 0
__pyx_v_pattern_size = 0;
+5890: pattern_ptr = NULL
__pyx_v_pattern_ptr = NULL;
5891:
+5892: elif isinstance(pattern, int):
__pyx_t_4 = PyInt_Check(__pyx_v_pattern);
__pyx_t_2 = (__pyx_t_4 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L3;
}
+5893: pattern_value = <byte_t>pattern
__pyx_t_5 = __Pyx_PyInt_As_byte_t(__pyx_v_pattern); if (unlikely((__pyx_t_5 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5893, __pyx_L1_error) __pyx_v_pattern_value = ((byte_t)__pyx_t_5);
+5894: pattern_size = 1
__pyx_v_pattern_size = 1;
+5895: pattern_ptr = &pattern_value
__pyx_v_pattern_ptr = (&__pyx_v_pattern_value);
5896:
5897: else:
+5898: pattern_view = pattern
/*else*/ { __pyx_t_6 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_pattern, 0); if (unlikely(!__pyx_t_6.memview)) __PYX_ERR(0, 5898, __pyx_L1_error) __pyx_v_pattern_view = __pyx_t_6; __pyx_t_6.memview = NULL; __pyx_t_6.data = NULL;
+5899: pattern_size = len(pattern_view)
__pyx_t_7 = __Pyx_MemoryView_Len(__pyx_v_pattern_view);
__pyx_v_pattern_size = __pyx_t_7;
5900: with cython.boundscheck(False):
+5901: pattern_ptr = &pattern_view[0]
__pyx_t_8 = 0; if (__pyx_t_8 < 0) __pyx_t_8 += __pyx_v_pattern_view.shape[0]; __pyx_v_pattern_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_pattern_view.data + __pyx_t_8 * __pyx_v_pattern_view.strides[0]) )))); } __pyx_L3:;
5902:
+5903: start_, endex_ = self.bound_(start, endex)
__pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_10 = __pyx_t_9.f0; __pyx_t_11 = __pyx_t_9.f1; __pyx_v_start_ = __pyx_t_10; __pyx_v_endex_ = __pyx_t_11;
+5904: return self.extract_(start_, endex_, pattern_size, pattern_ptr, step_, bound_)
__Pyx_XDECREF(((PyObject *)__pyx_r)); __pyx_t_12 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, __pyx_v_pattern_size, __pyx_v_pattern_ptr, __pyx_v_step_, __pyx_v_bound_)); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 5904, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_12); __pyx_r = ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_t_12); __pyx_t_12 = 0; goto __pyx_L0;
5905:
+5906: cdef vint shift_left_(self, addr_t offset, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_shift_left_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_offset, PyObject *__pyx_v_backups) { Rack_ *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ *__pyx_v_block; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("shift_left_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.shift_left_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5907: cdef:
+5908: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5909: size_t block_index
5910: Block_* block
5911:
+5912: if offset and Rack_Length(blocks):
__pyx_t_3 = (__pyx_v_offset != 0); if (__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L4_bool_binop_done; } __pyx_t_3 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L4_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+5913: self._pretrim_start_(ADDR_MAX, offset, backups)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_pretrim_start_(__pyx_v_self, ADDR_MAX, __pyx_v_offset, __pyx_v_backups); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5913, __pyx_L1_error)
+5914: blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5915:
+5916: for block_index in range(Rack_Length(blocks)):
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_6 = __pyx_t_5; for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_block_index = __pyx_t_7;
+5917: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+5918: block.address -= offset
__pyx_v_block->address = (__pyx_v_block->address - __pyx_v_offset); }
5919:
+5920: cdef vint shift_right_(self, addr_t offset, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_shift_right_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_offset, PyObject *__pyx_v_backups) { Rack_ *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ *__pyx_v_block; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("shift_right_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.shift_right_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5921: cdef:
+5922: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5923: size_t block_index
5924: Block_* block
5925:
+5926: if offset and Rack_Length(blocks):
__pyx_t_3 = (__pyx_v_offset != 0); if (__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L4_bool_binop_done; } __pyx_t_3 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L4_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+5927: self._pretrim_endex_(ADDR_MIN, offset, backups)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_pretrim_endex_(__pyx_v_self, ADDR_MIN, __pyx_v_offset, __pyx_v_backups); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5927, __pyx_L1_error)
+5928: blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5929:
+5930: for block_index in range(Rack_Length(blocks)):
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_6 = __pyx_t_5; for (__pyx_t_7 = 0; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_block_index = __pyx_t_7;
+5931: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+5932: block.address += offset
__pyx_v_block->address = (__pyx_v_block->address + __pyx_v_offset); }
5933:
+5934: def shift(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_87shift(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_86shift[] = "Memory.shift(self: u'Memory', offset: Address, backups: Optional[MemoryList] = None) -> None\nShifts the items.\n\n Arguments:\n offset (int):\n Signed amount of address shifting.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the deleted\n items, before trimming.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+---+\n | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|\n +===+===+===+===+===+===+===+===+===+===+===+\n | | | |[A | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C]| |[x | y | z]| | | |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])\n >>> memory.shift(-2)\n >>> memory._blocks\n [[3, b'ABC'], [7, b'xyz']]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+---+\n | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|\n +===+===+===+===+===+===+===+===+===+===+===+\n | |[[[| |[A | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n | |[y | z]| | | | | | | | |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']], start=2)\n >>> backups = []\n >>> memory.shift(-7, backups=backups)\n >>> memory._blocks\n [[2, b'yz']]\n >>> len(backups)\n 1\n >>> backups[0]._blocks\n [[5, b'ABC'], [9, b'x']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_87shift(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_offset = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("shift (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_offset,&__pyx_n_s_backups,0}; PyObject* values[2] = {0,0}; /* … */ /* function exit code */ 
__Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_86shift(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_offset, PyObject *__pyx_v_backups) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("shift", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("bytesparse._c.Memory.shift", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
5935: self: 'Memory',
5936: offset: Address,
+5937: backups: Optional[MemoryList] = None,
values[1] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[1] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "shift") < 0)) __PYX_ERR(0, 5934, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_offset = values[0]; __pyx_v_backups = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("shift", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 5934, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.shift", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_86shift(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_offset, __pyx_v_backups);
5938: ) -> None:
5939: r"""Shifts the items.
5940:
5941: Arguments:
5942: offset (int):
5943: Signed amount of address shifting.
5944:
5945: backups (list of :obj:`Memory`):
5946: Optional output list holding backup copies of the deleted
5947: items, before trimming.
5948:
5949: Examples:
5950: +---+---+---+---+---+---+---+---+---+---+---+
5951: | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|
5952: +===+===+===+===+===+===+===+===+===+===+===+
5953: | | | |[A | B | C]| |[x | y | z]| |
5954: +---+---+---+---+---+---+---+---+---+---+---+
5955: | |[A | B | C]| |[x | y | z]| | | |
5956: +---+---+---+---+---+---+---+---+---+---+---+
5957:
5958: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])
5959: >>> memory.shift(-2)
5960: >>> memory._blocks
5961: [[3, b'ABC'], [7, b'xyz']]
5962:
5963: ~~~
5964:
5965: +---+---+---+---+---+---+---+---+---+---+---+
5966: | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|
5967: +===+===+===+===+===+===+===+===+===+===+===+
5968: | |[[[| |[A | B | C]| |[x | y | z]| |
5969: +---+---+---+---+---+---+---+---+---+---+---+
5970: | |[y | z]| | | | | | | | |
5971: +---+---+---+---+---+---+---+---+---+---+---+
5972:
5973: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']], start=2)
5974: >>> backups = []
5975: >>> memory.shift(-7, backups=backups)
5976: >>> memory._blocks
5977: [[2, b'yz']]
5978: >>> len(backups)
5979: 1
5980: >>> backups[0]._blocks
5981: [[5, b'ABC'], [9, b'x']]
5982: """
5983:
+5984: if offset < 0:
__pyx_t_1 = PyObject_RichCompare(__pyx_v_offset, __pyx_int_0, Py_LT); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5984, __pyx_L1_error) __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 5984, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_2) { /* … */ }
+5985: return self.shift_left_(<addr_t>-offset, backups)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = PyNumber_Negative(__pyx_v_offset); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5985, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_t_1); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5985, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 5985, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->shift_left_(__pyx_v_self, ((addr_t)__pyx_t_3), ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5985, __pyx_L1_error) __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5985, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0;
5986: else:
+5987: return self.shift_right_(<addr_t>offset, backups)
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_offset); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 5987, __pyx_L1_error) if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 5987, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->shift_right_(__pyx_v_self, ((addr_t)__pyx_t_3), ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 5987, __pyx_L1_error) __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5987, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; }
5988:
+5989: cdef vint reserve_(self, addr_t address, addr_t size, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_reserve_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address, addr_t __pyx_v_size, PyObject *__pyx_v_backups) { addr_t __pyx_v_offset; Rack_ *__pyx_v_blocks; size_t __pyx_v_block_count; size_t __pyx_v_block_index; Block_ *__pyx_v_block; addr_t __pyx_v_block_start; Block_ *__pyx_v_block2; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("reserve_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Memory.reserve_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
5990: cdef:
5991: addr_t offset
+5992: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
5993: size_t block_count
5994: size_t block_index
5995: Block_* block
5996: addr_t block_start
5997: Block_* block2
5998:
+5999: if size and Rack_Length(blocks):
__pyx_t_3 = (__pyx_v_size != 0); if (__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L4_bool_binop_done; } __pyx_t_3 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L4_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+6000: self._pretrim_endex_(address, size, backups)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_pretrim_endex_(__pyx_v_self, __pyx_v_address, __pyx_v_size, __pyx_v_backups); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6000, __pyx_L1_error)
6001:
+6002: blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
+6003: block_index = Rack_IndexStart(blocks, address)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_address); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 6003, __pyx_L1_error)
__pyx_v_block_index = __pyx_t_5;
+6004: block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
6005:
+6006: if block_index < block_count:
__pyx_t_2 = ((__pyx_v_block_index < __pyx_v_block_count) != 0); if (__pyx_t_2) { /* … */ }
+6007: block = Rack_Get_(blocks, block_index)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_blocks, __pyx_v_block_index); if (unlikely(__pyx_t_6 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 6007, __pyx_L1_error) __pyx_v_block = __pyx_t_6;
+6008: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
6009:
+6010: if address > block_start:
__pyx_t_2 = ((__pyx_v_address > __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ }
6011: # Split into two blocks, reserving emptiness
+6012: CheckAddSizeU(block_count, 1) # ensure free slot
__pyx_t_4 = __pyx_f_10bytesparse_2_c_CheckAddSizeU(__pyx_v_block_count, 1); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6012, __pyx_L1_error)
+6013: offset = address - block_start
__pyx_v_offset = (__pyx_v_address - __pyx_v_block_start);
+6014: block2 = Block_GetSlice_(block, offset, SIZE_HMAX)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Block_GetSlice_(__pyx_v_block, __pyx_v_offset, SIZE_HMAX); if (unlikely(__pyx_t_6 == ((Block_ *)NULL))) __PYX_ERR(0, 6014, __pyx_L1_error)
__pyx_v_block2 = __pyx_t_6;
+6015: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; goto __pyx_L13_try_end; __pyx_L8_error:; /* … */ __Pyx_XGIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_8); __Pyx_XGIVEREF(__pyx_t_9); __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); goto __pyx_L1_error; __pyx_L13_try_end:; }
+6016: block = Block_DelSlice_(block, offset, SIZE_HMAX)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Block_DelSlice_(__pyx_v_block, __pyx_v_offset, SIZE_HMAX); if (unlikely(__pyx_t_6 == ((Block_ *)NULL))) __PYX_ERR(0, 6016, __pyx_L8_error)
__pyx_v_block = __pyx_t_6;
6017:
+6018: Rack_Set__(blocks, block_index, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block));
+6019: block_index += 1
__pyx_v_block_index = (__pyx_v_block_index + 1);
6020:
+6021: CheckAddAddrU(address, size)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_address, __pyx_v_size); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6021, __pyx_L8_error)
+6022: block2.address = address + size
__pyx_v_block2->address = (__pyx_v_address + __pyx_v_size);
+6023: self._ = blocks = Rack_Insert(blocks, block_index, block2)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Insert(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block2); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 6023, __pyx_L8_error)
__pyx_v_self->_ = __pyx_t_1;
__pyx_v_blocks = __pyx_t_1;
+6024: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory.reserve_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0) __PYX_ERR(0, 6024, __pyx_L10_except_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_11); __Pyx_GOTREF(__pyx_t_12);
+6025: block2 = Block_Free(block2) # orphan
__pyx_v_block2 = __pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block2);
+6026: raise
__Pyx_GIVEREF(__pyx_t_10); __Pyx_GIVEREF(__pyx_t_11); __Pyx_XGIVEREF(__pyx_t_12); __Pyx_ErrRestoreWithState(__pyx_t_10, __pyx_t_11, __pyx_t_12); __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __PYX_ERR(0, 6026, __pyx_L10_except_error) } __pyx_L10_except_error:;
+6027: block_index += 1
__pyx_v_block_index = (__pyx_v_block_index + 1);
6028:
+6029: for block_index in range(block_index, Rack_Length(blocks)):
__pyx_t_13 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_14 = __pyx_t_13; for (__pyx_t_15 = __pyx_v_block_index; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { __pyx_v_block_index = __pyx_t_15;
+6030: block = Rack_Get_(blocks, block_index)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_blocks, __pyx_v_block_index); if (unlikely(__pyx_t_6 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 6030, __pyx_L1_error) __pyx_v_block = __pyx_t_6;
+6031: block.address += size
__pyx_v_block->address = (__pyx_v_block->address + __pyx_v_size); }
6032:
+6033: def reserve(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_89reserve(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_88reserve[] = "Memory.reserve(self: u'Memory', address: Address, size: Address, backups: Optional[MemoryList] = None) -> None\nInserts emptiness.\n\n Reserves emptiness at the provided address.\n\n Arguments:\n address (int):\n Start address of the emptiness to insert.\n\n size (int):\n Size of the emptiness to insert.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the deleted\n items, before trimming.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+---+\n | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|\n +===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| |[x | y | z]| | | |\n +---+---+---+---+---+---+---+---+---+---+---+\n | |[A]| | | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[3, b'ABC'], [7, b'xyz']])\n >>> memory.reserve(4, 2)\n >>> memory._blocks\n [[2, b'A'], [6, b'BC'], [9, b'xyz']]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+---+\n | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|\n +===+===+===+===+===+===+===+===+===+===+===+\n | | | |[A | B | C]| |[x | y | z]|)))|\n +---+---+---+---+---+---+---+---+---+---+---+\n | | | | | | | | |[A | B]|)))|\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']], endex=12)\n >>> backups = []\n >>> memory.reserve(5, 5, backups=backups)\n >>> memory._blocks\n [[10, b'AB']]\n >>> len(backups)\n 1\n >>> backups[0]._blocks\n [[7, b'C'], [9, b'xyz']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_89reserve(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_address = 0; PyObject *__pyx_v_size = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations 
__Pyx_RefNannySetupContext("reserve (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_address,&__pyx_n_s_size,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_88reserve(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address, PyObject *__pyx_v_size, PyObject *__pyx_v_backups) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("reserve", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.reserve", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6034: self: 'Memory',
6035: address: Address,
6036: size: Address,
+6037: backups: Optional[MemoryList] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_address)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_size)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("reserve", 0, 2, 3, 1); __PYX_ERR(0, 6033, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "reserve") < 0)) __PYX_ERR(0, 6033, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_address = values[0]; __pyx_v_size = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("reserve", 0, 2, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6033, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.reserve", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_88reserve(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_address, __pyx_v_size, __pyx_v_backups);
6038: ) -> None:
6039: r"""Inserts emptiness.
6040:
6041: Reserves emptiness at the provided address.
6042:
6043: Arguments:
6044: address (int):
6045: Start address of the emptiness to insert.
6046:
6047: size (int):
6048: Size of the emptiness to insert.
6049:
6050: backups (list of :obj:`Memory`):
6051: Optional output list holding backup copies of the deleted
6052: items, before trimming.
6053:
6054: Examples:
6055: +---+---+---+---+---+---+---+---+---+---+---+
6056: | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|
6057: +===+===+===+===+===+===+===+===+===+===+===+
6058: | |[A | B | C]| |[x | y | z]| | | |
6059: +---+---+---+---+---+---+---+---+---+---+---+
6060: | |[A]| | | B | C]| |[x | y | z]| |
6061: +---+---+---+---+---+---+---+---+---+---+---+
6062:
6063: >>> memory = Memory(blocks=[[3, b'ABC'], [7, b'xyz']])
6064: >>> memory.reserve(4, 2)
6065: >>> memory._blocks
6066: [[2, b'A'], [6, b'BC'], [9, b'xyz']]
6067:
6068: ~~~
6069:
6070: +---+---+---+---+---+---+---+---+---+---+---+
6071: | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|
6072: +===+===+===+===+===+===+===+===+===+===+===+
6073: | | | |[A | B | C]| |[x | y | z]|)))|
6074: +---+---+---+---+---+---+---+---+---+---+---+
6075: | | | | | | | | |[A | B]|)))|
6076: +---+---+---+---+---+---+---+---+---+---+---+
6077:
6078: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']], endex=12)
6079: >>> backups = []
6080: >>> memory.reserve(5, 5, backups=backups)
6081: >>> memory._blocks
6082: [[10, b'AB']]
6083: >>> len(backups)
6084: 1
6085: >>> backups[0]._blocks
6086: [[7, b'C'], [9, b'xyz']]
6087: """
6088:
+6089: self.reserve_(<addr_t>address, <addr_t>size, backups)
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6089, __pyx_L1_error) __pyx_t_2 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_size); if (unlikely((__pyx_t_2 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6089, __pyx_L1_error) if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6089, __pyx_L1_error) __pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->reserve_(__pyx_v_self, ((addr_t)__pyx_t_1), ((addr_t)__pyx_t_2), ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6089, __pyx_L1_error)
6090:
+6091: cdef vint _insert_(self, addr_t address, size_t size, const byte_t* buffer, bint shift_after) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory__insert_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address, size_t __pyx_v_size, byte_t const *__pyx_v_buffer, int __pyx_v_shift_after) { Rack_ *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; Block_ *__pyx_v_block2; addr_t __pyx_v_block_start2; size_t __pyx_v_offset; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_insert_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_13); __Pyx_XDECREF(__pyx_t_14); __Pyx_AddTraceback("bytesparse._c.Memory._insert_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6092: cdef:
6093: Rack_* blocks
6094: size_t block_index
6095: Block_* block
6096: addr_t block_start
6097: addr_t block_endex
6098: Block_* block2
6099: addr_t block_start2
6100: size_t offset
6101:
+6102: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+6103: blocks = self._
__pyx_t_2 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_2;
+6104: block_index = Rack_IndexStart(blocks, address)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_address); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 6104, __pyx_L1_error)
__pyx_v_block_index = __pyx_t_3;
6105:
+6106: if block_index:
__pyx_t_1 = (__pyx_v_block_index != 0); if (__pyx_t_1) { /* … */ }
+6107: block = Rack_Get_(blocks, block_index - 1)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_blocks, (__pyx_v_block_index - 1)); if (unlikely(__pyx_t_4 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 6107, __pyx_L1_error) __pyx_v_block = __pyx_t_4;
+6108: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+6109: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
6110:
+6111: if block_endex == address:
__pyx_t_1 = ((__pyx_v_block_endex == __pyx_v_address) != 0); if (__pyx_t_1) { /* … */ }
6112: # Extend previous block
+6113: block = Block_Extend_(block, size, buffer)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Extend_(__pyx_v_block, __pyx_v_size, __pyx_v_buffer); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6113, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6114: Rack_Set__(blocks, block_index - 1, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, (__pyx_v_block_index - 1), __pyx_v_block));
6115:
6116: # Shift blocks after
+6117: if shift_after:
__pyx_t_1 = (__pyx_v_shift_after != 0); if (__pyx_t_1) { /* … */ goto __pyx_L6; }
+6118: for block_index in range(block_index, Rack_Length(blocks)):
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_6 = __pyx_t_5; for (__pyx_t_7 = __pyx_v_block_index; __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_block_index = __pyx_t_7;
+6119: block = Rack_Get_(blocks, block_index)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_blocks, __pyx_v_block_index); if (unlikely(__pyx_t_4 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 6119, __pyx_L1_error) __pyx_v_block = __pyx_t_4;
+6120: CheckAddAddrU(block.address, size)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block->address, __pyx_v_size); if (unlikely(__pyx_t_8 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6120, __pyx_L1_error)
+6121: block.address += size
__pyx_v_block->address = (__pyx_v_block->address + __pyx_v_size); }
6122: else:
+6123: block_index += 1
/*else*/ { __pyx_v_block_index = (__pyx_v_block_index + 1);
+6124: if block_index < Rack_Length(blocks):
__pyx_t_1 = ((__pyx_v_block_index < __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks)) != 0); if (__pyx_t_1) { /* … */ } } __pyx_L6:;
+6125: CheckAddAddrU(block_endex, size)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block_endex, __pyx_v_size); if (unlikely(__pyx_t_8 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6125, __pyx_L1_error)
+6126: block_endex += size
__pyx_v_block_endex = (__pyx_v_block_endex + __pyx_v_size);
6127:
+6128: block2 = Rack_Get_(blocks, block_index)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_blocks, __pyx_v_block_index); if (unlikely(__pyx_t_4 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 6128, __pyx_L1_error) __pyx_v_block2 = __pyx_t_4;
+6129: block_start2 = Block_Start(block2)
__pyx_v_block_start2 = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block2);
6130:
6131: # Merge with next block
+6132: if block_endex == block_start2:
__pyx_t_1 = ((__pyx_v_block_endex == __pyx_v_block_start2) != 0); if (__pyx_t_1) { /* … */ }
+6133: block = Block_Extend(block, block2)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Extend(__pyx_v_block, __pyx_v_block2); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6133, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6134: Rack_Set__(blocks, block_index - 1, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, (__pyx_v_block_index - 1), __pyx_v_block));
+6135: self._ = blocks = Rack_Pop_(blocks, block_index, NULL)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Pop_(__pyx_v_blocks, __pyx_v_block_index, NULL); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 6135, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_2;
__pyx_v_blocks = __pyx_t_2;
+6136: return 0
__pyx_r = 0; goto __pyx_L0;
6137:
+6138: if block_index < Rack_Length(blocks):
__pyx_t_1 = ((__pyx_v_block_index < __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks)) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L11; }
+6139: block = Rack_Get_(blocks, block_index)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_blocks, __pyx_v_block_index); if (unlikely(__pyx_t_4 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 6139, __pyx_L1_error) __pyx_v_block = __pyx_t_4;
+6140: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
6141:
+6142: if address < block_start:
__pyx_t_1 = ((__pyx_v_address < __pyx_v_block_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L12; }
+6143: if shift_after:
__pyx_t_1 = (__pyx_v_shift_after != 0); if (__pyx_t_1) { /* … */ goto __pyx_L13; }
6144: # Insert a standalone block before
+6145: block = Block_Create(address, size, buffer)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Create(__pyx_v_address, __pyx_v_size, __pyx_v_buffer); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6145, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6146: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; goto __pyx_L19_try_end; __pyx_L14_error:; /* … */ __Pyx_XGIVEREF(__pyx_t_9); __Pyx_XGIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_11); __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_10, __pyx_t_11); goto __pyx_L1_error; __pyx_L19_try_end:; }
+6147: self._ = blocks = Rack_Insert(blocks, block_index, block)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Insert(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 6147, __pyx_L14_error)
__pyx_v_self->_ = __pyx_t_2;
__pyx_v_blocks = __pyx_t_2;
+6148: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory._insert_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14) < 0) __PYX_ERR(0, 6148, __pyx_L16_except_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_GOTREF(__pyx_t_13); __Pyx_GOTREF(__pyx_t_14);
+6149: Block_Free(block) # orphan
(void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block));
+6150: raise
__Pyx_GIVEREF(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_ErrRestoreWithState(__pyx_t_12, __pyx_t_13, __pyx_t_14); __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __PYX_ERR(0, 6150, __pyx_L16_except_error) } __pyx_L16_except_error:;
6151: else:
+6152: CheckAddAddrU(address, size)
/*else*/ {
__pyx_t_8 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_address, __pyx_v_size); if (unlikely(__pyx_t_8 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6152, __pyx_L1_error)
+6153: if address + size == block_start:
__pyx_t_1 = (((__pyx_v_address + __pyx_v_size) == __pyx_v_block_start) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L22; }
6154: # Merge with next block
+6155: block = Rack_Get_(blocks, block_index)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Get_(__pyx_v_blocks, __pyx_v_block_index); if (unlikely(__pyx_t_4 == ((Block_ *)NULL) && PyErr_Occurred())) __PYX_ERR(0, 6155, __pyx_L1_error) __pyx_v_block = __pyx_t_4;
+6156: block.address = address
__pyx_v_block->address = __pyx_v_address;
+6157: block = Block_ExtendLeft_(block, size, buffer)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_ExtendLeft_(__pyx_v_block, __pyx_v_size, __pyx_v_buffer); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6157, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6158: Rack_Set__(blocks, block_index, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block));
6159: else:
6160: # Insert a standalone block before
+6161: block = Block_Create(address, size, buffer)
/*else*/ {
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Create(__pyx_v_address, __pyx_v_size, __pyx_v_buffer); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6161, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6162: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; goto __pyx_L28_try_end; __pyx_L23_error:; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_11); __Pyx_XGIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_9); __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_10, __pyx_t_9); goto __pyx_L1_error; __pyx_L28_try_end:; } } __pyx_L22:; } __pyx_L13:;
+6163: self._ = blocks = Rack_Insert(blocks, block_index, block)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Insert(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 6163, __pyx_L23_error)
__pyx_v_self->_ = __pyx_t_2;
__pyx_v_blocks = __pyx_t_2;
+6164: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory._insert_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_14, &__pyx_t_13, &__pyx_t_12) < 0) __PYX_ERR(0, 6164, __pyx_L25_except_error) __Pyx_GOTREF(__pyx_t_14); __Pyx_GOTREF(__pyx_t_13); __Pyx_GOTREF(__pyx_t_12);
+6165: Block_Free(block) # orphan
(void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block));
+6166: raise
__Pyx_GIVEREF(__pyx_t_14); __Pyx_GIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_12); __Pyx_ErrRestoreWithState(__pyx_t_14, __pyx_t_13, __pyx_t_12); __pyx_t_14 = 0; __pyx_t_13 = 0; __pyx_t_12 = 0; __PYX_ERR(0, 6166, __pyx_L25_except_error) } __pyx_L25_except_error:;
6167: else:
6168: # Insert buffer into the current block
+6169: CheckSubAddrU(address, block_start)
/*else*/ {
__pyx_t_8 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_address, __pyx_v_block_start); if (unlikely(__pyx_t_8 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6169, __pyx_L1_error)
+6170: CheckAddrToSizeU(address - block_start)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU((__pyx_v_address - __pyx_v_block_start)); if (unlikely(__pyx_t_8 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6170, __pyx_L1_error)
+6171: offset = <size_t>(address - block_start)
__pyx_v_offset = ((size_t)(__pyx_v_address - __pyx_v_block_start));
+6172: block = Block_Reserve_(block, offset, size, False)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Reserve_(__pyx_v_block, __pyx_v_offset, __pyx_v_size, 0); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6172, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6173: block = Block_Write_(block, offset, size, buffer)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Write_(__pyx_v_block, __pyx_v_offset, __pyx_v_size, __pyx_v_buffer); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6173, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6174: Rack_Set__(blocks, block_index, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block)); } __pyx_L12:;
6175:
6176: # Shift blocks after
+6177: if shift_after:
__pyx_t_1 = (__pyx_v_shift_after != 0); if (__pyx_t_1) { /* … */ }
+6178: for block_index in range(block_index + 1, Rack_Length(blocks)):
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_6 = __pyx_t_5; for (__pyx_t_7 = (__pyx_v_block_index + 1); __pyx_t_7 < __pyx_t_6; __pyx_t_7+=1) { __pyx_v_block_index = __pyx_t_7;
+6179: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+6180: CheckAddAddrU(block.address, size)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block->address, __pyx_v_size); if (unlikely(__pyx_t_8 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6180, __pyx_L1_error)
+6181: block.address += size
__pyx_v_block->address = (__pyx_v_block->address + __pyx_v_size); }
6182:
6183: else:
6184: # Append a standalone block after
+6185: block = Block_Create(address, size, buffer)
/*else*/ {
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_Create(__pyx_v_address, __pyx_v_size, __pyx_v_buffer); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6185, __pyx_L1_error)
__pyx_v_block = __pyx_t_4;
+6186: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; goto __pyx_L39_try_end; __pyx_L34_error:; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; /* … */ __Pyx_XGIVEREF(__pyx_t_9); __Pyx_XGIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_11); __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_10, __pyx_t_11); goto __pyx_L1_error; __pyx_L39_try_end:; } } __pyx_L11:;
+6187: self._ = blocks = Rack_Append(blocks, block)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Append(__pyx_v_blocks, __pyx_v_block); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 6187, __pyx_L34_error)
__pyx_v_self->_ = __pyx_t_2;
__pyx_v_blocks = __pyx_t_2;
+6188: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory._insert_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14) < 0) __PYX_ERR(0, 6188, __pyx_L36_except_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_GOTREF(__pyx_t_13); __Pyx_GOTREF(__pyx_t_14);
+6189: Block_Free(block) # orphan
(void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block));
+6190: raise
__Pyx_GIVEREF(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_ErrRestoreWithState(__pyx_t_12, __pyx_t_13, __pyx_t_14); __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __PYX_ERR(0, 6190, __pyx_L36_except_error) } __pyx_L36_except_error:;
6191:
+6192: def _insert(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_91_insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_90_insert[] = "Memory._insert(self: u'Memory', address: Address, bytearray data: bytearray, shift_after: bool) -> None\nInserts data.\n\n Low-level method to insert data into the underlying data structure.\n\n Arguments:\n address (int):\n Address of the insertion point.\n\n data (:obj:`bytearray`):\n Data to insert.\n\n shift_after (bool):\n Shifts the addresses of blocks after the insertion point,\n adding the size of the inserted data.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_91_insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_address = 0; PyObject *__pyx_v_data = 0; PyObject *__pyx_v_shift_after = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_insert (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_address,&__pyx_n_s_data,&__pyx_n_s_shift_after,0}; PyObject* values[3] = {0,0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_address)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_insert", 1, 3, 3, 1); __PYX_ERR(0, 6192, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = 
__Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_shift_after)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_insert", 1, 3, 3, 2); __PYX_ERR(0, 6192, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_insert") < 0)) __PYX_ERR(0, 6192, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[2] = PyTuple_GET_ITEM(__pyx_args, 2); } __pyx_v_address = values[0]; __pyx_v_data = ((PyObject*)values[1]); __pyx_v_shift_after = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_insert", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6192, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory._insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_data), (&PyByteArray_Type), 1, "data", 1))) __PYX_ERR(0, 6195, __pyx_L1_error) __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_90_insert(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_address, __pyx_v_data, __pyx_v_shift_after); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_90_insert(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address, PyObject *__pyx_v_data, PyObject *__pyx_v_shift_after) { size_t __pyx_v_size; __Pyx_memviewslice __pyx_v_view = { 0, 0, { 0 }, { 0 }, { 0 } }; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_insert", 0); /* … */ /* function exit code */ __pyx_r = Py_None; 
__Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_1, 1); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Memory._insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6193: self: 'Memory',
6194: address: Address,
6195: data: bytearray,
6196: shift_after: bool,
6197: ) -> None:
6198: r"""Inserts data.
6199:
6200: Low-level method to insert data into the underlying data structure.
6201:
6202: Arguments:
6203: address (int):
6204: Address of the insertion point.
6205:
6206: data (:obj:`bytearray`):
6207: Data to insert.
6208:
6209: shift_after (bool):
6210: Shifts the addresses of blocks after the insertion point,
6211: adding the size of the inserted data.
6212: """
6213: cdef:
6214: size_t size
6215: const byte_t[:] view
6216:
+6217: view = data
__pyx_t_1 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_data, 0); if (unlikely(!__pyx_t_1.memview)) __PYX_ERR(0, 6217, __pyx_L1_error) __pyx_v_view = __pyx_t_1; __pyx_t_1.memview = NULL; __pyx_t_1.data = NULL;
+6218: size = len(view)
__pyx_t_2 = __Pyx_MemoryView_Len(__pyx_v_view);
__pyx_v_size = __pyx_t_2;
+6219: if size > SIZE_HMAX:
__pyx_t_3 = ((__pyx_v_size > SIZE_HMAX) != 0); if (unlikely(__pyx_t_3)) { /* … */ }
+6220: raise OverflowError('data size')
__pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_OverflowError, __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 6220, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 6220, __pyx_L1_error) /* … */ __pyx_tuple__27 = PyTuple_Pack(1, __pyx_kp_u_data_size); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(0, 6220, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__27); __Pyx_GIVEREF(__pyx_tuple__27);
6221:
+6222: self._insert_(<addr_t>address, size, &view[0], <bint>shift_after)
__pyx_t_5 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_5 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6222, __pyx_L1_error) __pyx_t_6 = 0; __pyx_t_7 = -1; if (__pyx_t_6 < 0) { __pyx_t_6 += __pyx_v_view.shape[0]; if (unlikely(__pyx_t_6 < 0)) __pyx_t_7 = 0; } else if (unlikely(__pyx_t_6 >= __pyx_v_view.shape[0])) __pyx_t_7 = 0; if (unlikely(__pyx_t_7 != -1)) { __Pyx_RaiseBufferIndexError(__pyx_t_7); __PYX_ERR(0, 6222, __pyx_L1_error) } __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_shift_after); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 6222, __pyx_L1_error) __pyx_t_8 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_insert_(__pyx_v_self, ((addr_t)__pyx_t_5), __pyx_v_size, (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_view.data + __pyx_t_6 * __pyx_v_view.strides[0]) )))), __pyx_t_3); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 6222, __pyx_L1_error)
6223:
+6224: cdef vint _erase_(self, addr_t start, addr_t endex, bint shift_after, bint merge_deletion) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory__erase_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start, addr_t __pyx_v_endex, int __pyx_v_shift_after, int __pyx_v_merge_deletion) { addr_t __pyx_v_size; addr_t __pyx_v_offset; Rack_ *__pyx_v_blocks; size_t __pyx_v_block_index; size_t __pyx_v_inner_start; size_t __pyx_v_inner_endex; Block_ *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; Block_ *__pyx_v_block2; addr_t __pyx_v_block_start2; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_erase_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_13); __Pyx_XDECREF(__pyx_t_14); __Pyx_AddTraceback("bytesparse._c.Memory._erase_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6225: cdef:
6226: addr_t size
6227: addr_t offset
6228:
+6229: Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
6230: size_t block_index
6231: size_t inner_start
6232: size_t inner_endex
6233:
+6234: Block_* block = NULL
__pyx_v_block = NULL;
6235: addr_t block_start
6236: addr_t block_endex
6237:
+6238: Block_* block2 = NULL
__pyx_v_block2 = NULL;
6239: addr_t block_start2
6240:
+6241: if endex > start:
__pyx_t_2 = ((__pyx_v_endex > __pyx_v_start) != 0); if (__pyx_t_2) { /* … */ }
+6242: size = endex - start
__pyx_v_size = (__pyx_v_endex - __pyx_v_start);
+6243: block_index = Rack_IndexStart(blocks, start)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_start); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 6243, __pyx_L1_error)
__pyx_v_block_index = __pyx_t_3;
6244:
6245: # Delete final/inner part of deletion start block
+6246: for block_index in range(block_index, Rack_Length(blocks)):
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_block_index; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_block_index = __pyx_t_6;
+6247: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
6248:
+6249: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+6250: if start <= block_start:
__pyx_t_2 = ((__pyx_v_start <= __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ }
+6251: break # inner starts here
goto __pyx_L5_break;
6252:
+6253: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+6254: if start < block_endex:
__pyx_t_2 = ((__pyx_v_start < __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } } /*else*/ {
+6255: if shift_after:
__pyx_t_2 = (__pyx_v_shift_after != 0); if (__pyx_t_2) { /* … */ goto __pyx_L8; }
+6256: CheckAddrToSizeU(start - block_start)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU((__pyx_v_start - __pyx_v_block_start)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6256, __pyx_L1_error)
+6257: CheckAddrToSizeU(endex - block_start)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU((__pyx_v_endex - __pyx_v_block_start)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6257, __pyx_L1_error)
+6258: block = Block_DelSlice_(block, start - block_start, endex - block_start)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_DelSlice_(__pyx_v_block, (__pyx_v_start - __pyx_v_block_start), (__pyx_v_endex - __pyx_v_block_start)); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 6258, __pyx_L1_error)
__pyx_v_block = __pyx_t_8;
+6259: Rack_Set__(blocks, block_index, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block));
6260: else:
+6261: try:
/*else*/ { { /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; goto __pyx_L16_try_end; __pyx_L9_error:; /* … */ __Pyx_XGIVEREF(__pyx_t_9); __Pyx_XGIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_11); __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_10, __pyx_t_11); goto __pyx_L1_error; __pyx_L16_try_end:; } } __pyx_L8:;
+6262: CheckAddrToSizeU(start - block_start)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU((__pyx_v_start - __pyx_v_block_start)); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6262, __pyx_L9_error)
+6263: block = Block_GetSlice_(block, 0, start - block_start)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_GetSlice_(__pyx_v_block, 0, (__pyx_v_start - __pyx_v_block_start)); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 6263, __pyx_L9_error)
__pyx_v_block = __pyx_t_8;
+6264: block.address = block_start
__pyx_v_block->address = __pyx_v_block_start;
+6265: self._ = blocks = Rack_Insert_(blocks, block_index, block)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_Insert_(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 6265, __pyx_L9_error)
__pyx_v_self->_ = __pyx_t_1;
__pyx_v_blocks = __pyx_t_1;
+6266: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory._erase_", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14) < 0) __PYX_ERR(0, 6266, __pyx_L11_except_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_GOTREF(__pyx_t_13); __Pyx_GOTREF(__pyx_t_14);
+6267: block = Block_Free(block) # orphan
__pyx_v_block = __pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_block);
+6268: raise
__Pyx_GIVEREF(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_ErrRestoreWithState(__pyx_t_12, __pyx_t_13, __pyx_t_14); __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __PYX_ERR(0, 6268, __pyx_L11_except_error) } __pyx_L11_except_error:;
+6269: block_index += 1 # skip this from inner part
__pyx_v_block_index = (__pyx_v_block_index + 1);
+6270: break
goto __pyx_L5_break;
6271: else:
+6272: block_index = Rack_Length(blocks)
__pyx_v_block_index = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); } __pyx_L5_break:;
6273:
6274: # Delete initial part of deletion end block
+6275: inner_start = block_index
__pyx_v_inner_start = __pyx_v_block_index;
+6276: for block_index in range(block_index, Rack_Length(blocks)):
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_block_index; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_block_index = __pyx_t_6;
+6277: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
6278:
+6279: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+6280: if endex <= block_start:
__pyx_t_2 = ((__pyx_v_endex <= __pyx_v_block_start) != 0); if (__pyx_t_2) { /* … */ }
+6281: break # inner ends before here
goto __pyx_L20_break;
6282:
+6283: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+6284: if endex < block_endex:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ } } /*else*/ {
+6285: offset = endex - block_start
__pyx_v_offset = (__pyx_v_endex - __pyx_v_block_start);
+6286: CheckAddrToSizeU(offset)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_offset); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6286, __pyx_L1_error)
+6287: CheckAddAddrU(block.address, offset)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block->address, __pyx_v_offset); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6287, __pyx_L1_error)
+6288: block = Block_DelSlice_(block, 0, <size_t>offset)
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_DelSlice_(__pyx_v_block, 0, ((size_t)__pyx_v_offset)); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 6288, __pyx_L1_error)
__pyx_v_block = __pyx_t_8;
+6289: block.address += offset # update address
__pyx_v_block->address = (__pyx_v_block->address + __pyx_v_offset);
+6290: Rack_Set__(blocks, block_index, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, __pyx_v_block_index, __pyx_v_block));
+6291: break # inner ends before here
goto __pyx_L20_break;
6292: else:
+6293: block_index = Rack_Length(blocks)
__pyx_v_block_index = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); } __pyx_L20_break:;
+6294: inner_endex = block_index
__pyx_v_inner_endex = __pyx_v_block_index;
6295:
+6296: if merge_deletion:
__pyx_t_2 = (__pyx_v_merge_deletion != 0); if (__pyx_t_2) { /* … */ }
6297: # Check if inner deletion can be merged
+6298: if inner_start and inner_endex < Rack_Length(blocks):
__pyx_t_15 = (__pyx_v_inner_start != 0); if (__pyx_t_15) { } else { __pyx_t_2 = __pyx_t_15; goto __pyx_L25_bool_binop_done; } __pyx_t_15 = ((__pyx_v_inner_endex < __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks)) != 0); __pyx_t_2 = __pyx_t_15; __pyx_L25_bool_binop_done:; if (__pyx_t_2) { /* … */ }
+6299: block = Rack_Get__(blocks, inner_start - 1)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, (__pyx_v_inner_start - 1));
+6300: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
6301:
+6302: block2 = Rack_Get__(blocks, inner_endex)
__pyx_v_block2 = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_inner_endex);
+6303: block_start2 = Block_Start(block2)
__pyx_v_block_start2 = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block2);
6304:
+6305: if block_endex + size == block_start2:
__pyx_t_2 = (((__pyx_v_block_endex + __pyx_v_size) == __pyx_v_block_start2) != 0); if (__pyx_t_2) { /* … */ }
+6306: block = Block_Extend(block, block2) # merge deletion boundaries
__pyx_t_8 = __pyx_f_10bytesparse_2_c_Block_Extend(__pyx_v_block, __pyx_v_block2); if (unlikely(__pyx_t_8 == ((Block_ *)NULL))) __PYX_ERR(0, 6306, __pyx_L1_error)
__pyx_v_block = __pyx_t_8;
+6307: Rack_Set__(blocks, inner_start - 1, block) # update pointer
(void)(__pyx_f_10bytesparse_2_c_Rack_Set__(__pyx_v_blocks, (__pyx_v_inner_start - 1), __pyx_v_block));
+6308: inner_endex += 1 # add to inner deletion
__pyx_v_inner_endex = (__pyx_v_inner_endex + 1);
+6309: block_index += 1 # skip address update
__pyx_v_block_index = (__pyx_v_block_index + 1);
6310:
+6311: if shift_after:
__pyx_t_2 = (__pyx_v_shift_after != 0); if (__pyx_t_2) { /* … */ }
6312: # Shift blocks after deletion
+6313: for block_index in range(block_index, Rack_Length(blocks)):
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_5 = __pyx_t_4; for (__pyx_t_6 = __pyx_v_block_index; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) { __pyx_v_block_index = __pyx_t_6;
+6314: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+6315: CheckSubAddrU(block.address, size)
__pyx_t_7 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_block->address, __pyx_v_size); if (unlikely(__pyx_t_7 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6315, __pyx_L1_error)
+6316: block.address -= size # update address
__pyx_v_block->address = (__pyx_v_block->address - __pyx_v_size); }
6317:
6318: # Delete inner full blocks
+6319: if inner_start < inner_endex:
__pyx_t_2 = ((__pyx_v_inner_start < __pyx_v_inner_endex) != 0); if (__pyx_t_2) { /* … */ }
+6320: self._ = blocks = Rack_DelSlice_(blocks, inner_start, inner_endex)
__pyx_t_1 = __pyx_f_10bytesparse_2_c_Rack_DelSlice_(__pyx_v_blocks, __pyx_v_inner_start, __pyx_v_inner_endex); if (unlikely(__pyx_t_1 == ((Rack_ *)NULL))) __PYX_ERR(0, 6320, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_1;
__pyx_v_blocks = __pyx_t_1;
6321:
+6322: def _erase(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_93_erase(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_92_erase[] = "Memory._erase(self: u'Memory', start: Address, endex: Address, shift_after: bool, merge_deletion: bool) -> None\nErases an address range.\n\n Low-level method to erase data within the underlying data structure.\n\n Arguments:\n start (int):\n Start address of the erasure range.\n\n endex (int):\n Exclusive end address of the erasure range.\n\n shift_after (bool):\n Shifts addresses of blocks after the end of the range,\n subtracting the size of the range itself.\n\n merge_deletion (bool):\n If data blocks before and after the address range are\n contiguous after erasure, merge the two blocks together.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_93_erase(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_shift_after = 0; PyObject *__pyx_v_merge_deletion = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_erase (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_shift_after,&__pyx_n_s_merge_deletion,0}; PyObject* values[4] = {0,0,0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start)) != 0)) kw_args--; else 
goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_erase", 1, 4, 4, 1); __PYX_ERR(0, 6322, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_shift_after)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_erase", 1, 4, 4, 2); __PYX_ERR(0, 6322, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 3: if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_merge_deletion)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_erase", 1, 4, 4, 3); __PYX_ERR(0, 6322, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_erase") < 0)) __PYX_ERR(0, 6322, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 4) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[2] = PyTuple_GET_ITEM(__pyx_args, 2); values[3] = PyTuple_GET_ITEM(__pyx_args, 3); } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_shift_after = values[2]; __pyx_v_merge_deletion = values[3]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_erase", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6322, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory._erase", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_92_erase(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_shift_after, __pyx_v_merge_deletion); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject 
*__pyx_pf_10bytesparse_2_c_6Memory_92_erase(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_shift_after, PyObject *__pyx_v_merge_deletion) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_erase", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory._erase", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6323: self: 'Memory',
6324: start: Address,
6325: endex: Address,
6326: shift_after: bool,
6327: merge_deletion: bool,
6328: ) -> None:
6329: r"""Erases an address range.
6330:
6331: Low-level method to erase data within the underlying data structure.
6332:
6333: Arguments:
6334: start (int):
6335: Start address of the erasure range.
6336:
6337: endex (int):
6338: Exclusive end address of the erasure range.
6339:
6340: shift_after (bool):
6341: Shifts addresses of blocks after the end of the range,
6342: subtracting the size of the range itself.
6343:
6344: merge_deletion (bool):
6345: If data blocks before and after the address range are
6346: contiguous after erasure, merge the two blocks together.
6347: """
6348:
+6349: self._erase_(<addr_t>start, <addr_t>endex, <bint>shift_after, <bint>merge_deletion)
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_start); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6349, __pyx_L1_error) __pyx_t_2 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_endex); if (unlikely((__pyx_t_2 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6349, __pyx_L1_error) __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_shift_after); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 6349, __pyx_L1_error) __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_v_merge_deletion); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 6349, __pyx_L1_error) __pyx_t_5 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, ((addr_t)__pyx_t_1), ((addr_t)__pyx_t_2), __pyx_t_3, __pyx_t_4); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 6349, __pyx_L1_error)
6350:
+6351: cdef vint insert_same_(self, addr_t address, Memory data, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_insert_same_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, CYTHON_UNUSED addr_t __pyx_v_address, struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_data, PyObject *__pyx_v_backups) { addr_t __pyx_v_data_start; addr_t __pyx_v_data_endex; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("insert_same_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.insert_same_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6352: cdef:
6353: addr_t data_start
6354: addr_t data_endex
6355:
+6356: data_start = data.start_()
__pyx_v_data_start = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_data->__pyx_vtab)->start_(__pyx_v_data);
+6357: data_endex = data.endex_()
__pyx_v_data_endex = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_data->__pyx_vtab)->endex_(__pyx_v_data);
6358:
+6359: if data_start < data_endex:
__pyx_t_1 = ((__pyx_v_data_start < __pyx_v_data_endex) != 0); if (__pyx_t_1) { /* … */ }
+6360: self.reserve_(data_start, data_endex, backups)
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->reserve_(__pyx_v_self, __pyx_v_data_start, __pyx_v_data_endex, __pyx_v_backups); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6360, __pyx_L1_error)
+6361: self.write_same_(data_start, data, False, backups)
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_same_(__pyx_v_self, __pyx_v_data_start, __pyx_v_data, 0, __pyx_v_backups); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6361, __pyx_L1_error)
6362:
+6363: cdef vint insert_raw_(self, addr_t address, size_t data_size, const byte_t* data_ptr, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_insert_raw_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address, size_t __pyx_v_data_size, byte_t const *__pyx_v_data_ptr, CYTHON_UNUSED PyObject *__pyx_v_backups) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("insert_raw_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); __Pyx_AddTraceback("bytesparse._c.Memory.insert_raw_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6364:
+6365: self._insert_(address, data_size, data_ptr, True) # TODO: backups
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_insert_(__pyx_v_self, __pyx_v_address, __pyx_v_data_size, __pyx_v_data_ptr, 1); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 6365, __pyx_L1_error)
6366:
+6367: if data_size:
__pyx_t_1 = (__pyx_v_data_size != 0); if (__pyx_t_1) { /* … */ }
+6368: self._crop(self._trim_start, self._trim_endex, None) # TODO: pre-trimming
__pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_crop); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6368, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_start); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 6368, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_self->_trim_endex); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 6368, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = NULL; __pyx_t_7 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_6)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_6); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); __pyx_t_7 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_t_4, __pyx_t_5, Py_None}; __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 6368, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_t_4, __pyx_t_5, Py_None}; __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 6368, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif { __pyx_t_8 = PyTuple_New(3+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 6368, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); if (__pyx_t_6) { __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); __pyx_t_6 = NULL; } __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, 
__pyx_t_4); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_t_5); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_7, Py_None); __pyx_t_4 = 0; __pyx_t_5 = 0; __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_8, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 6368, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
6369:
+6370: def insert(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_95insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_94insert[] = "Memory.insert(self: u'Memory', address: Address, data: Union[AnyBytes, Value, u'Memory'], backups: Optional[MemoryList] = None) -> None\nInserts data.\n\n Inserts data, moving existing items after the insertion address by the\n size of the inserted data.\n\n Arguments::\n address (int):\n Address of the insertion point.\n\n data (bytes):\n Data to insert.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the deleted\n items, before trimming.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|\n +===+===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| | | |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C]| | |[x | y | z]| |[$]| |\n +---+---+---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C]| | |[x | y | 1 | z]| |[$]|\n +---+---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> memory.insert(10, b'$')\n >>> memory._blocks\n [[1, b'ABC'], [6, b'xyz'], [10, b'$']]\n >>> memory.insert(8, b'1')\n >>> memory._blocks\n [[1, b'ABC'], [6, b'xy1z'], [11, b'$']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_95insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_address = 0; PyObject *__pyx_v_data = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("insert (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_address,&__pyx_n_s_data,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject 
*__pyx_pf_10bytesparse_2_c_6Memory_94insert(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address, PyObject *__pyx_v_data, PyObject *__pyx_v_backups) { addr_t __pyx_v_address_; __Pyx_memviewslice __pyx_v_data_view = { 0, 0, { 0 }, { 0 }, { 0 } }; byte_t __pyx_v_data_value; size_t __pyx_v_data_size; byte_t const *__pyx_v_data_ptr; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("insert", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_6, 1); __Pyx_AddTraceback("bytesparse._c.Memory.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_data_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6371: self: 'Memory',
6372: address: Address,
6373: data: Union[AnyBytes, Value, 'Memory'],
+6374: backups: Optional[MemoryList] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_address)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("insert", 0, 2, 3, 1); __PYX_ERR(0, 6370, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "insert") < 0)) __PYX_ERR(0, 6370, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_address = values[0]; __pyx_v_data = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("insert", 0, 2, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6370, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_94insert(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_address, __pyx_v_data, __pyx_v_backups);
6375: ) -> None:
6376: r"""Inserts data.
6377:
6378: Inserts data, moving existing items after the insertion address by the
6379: size of the inserted data.
6380:
6381: Arguments::
6382: address (int):
6383: Address of the insertion point.
6384:
6385: data (bytes):
6386: Data to insert.
6387:
6388: backups (list of :obj:`Memory`):
6389: Optional output list holding backup copies of the deleted
6390: items, before trimming.
6391:
6392: Example:
6393: +---+---+---+---+---+---+---+---+---+---+---+---+
6394: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11|
6395: +===+===+===+===+===+===+===+===+===+===+===+===+
6396: | |[A | B | C]| | |[x | y | z]| | | |
6397: +---+---+---+---+---+---+---+---+---+---+---+---+
6398: | |[A | B | C]| | |[x | y | z]| |[$]| |
6399: +---+---+---+---+---+---+---+---+---+---+---+---+
6400: | |[A | B | C]| | |[x | y | 1 | z]| |[$]|
6401: +---+---+---+---+---+---+---+---+---+---+---+---+
6402:
6403: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
6404: >>> memory.insert(10, b'$')
6405: >>> memory._blocks
6406: [[1, b'ABC'], [6, b'xyz'], [10, b'$']]
6407: >>> memory.insert(8, b'1')
6408: >>> memory._blocks
6409: [[1, b'ABC'], [6, b'xy1z'], [11, b'$']]
6410: """
6411: cdef:
+6412: addr_t address_ = <addr_t>address
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6412, __pyx_L1_error) __pyx_v_address_ = ((addr_t)__pyx_t_1);
6413: const byte_t[:] data_view
6414: byte_t data_value
6415: size_t data_size
6416: const byte_t* data_ptr
6417:
+6418: if isinstance(data, Memory):
__pyx_t_2 = __Pyx_TypeCheck(__pyx_v_data, __pyx_ptype_10bytesparse_2_c_Memory);
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {
/* … */
goto __pyx_L3;
}
+6419: self.insert_same_(address_, data, backups)
if (!(likely(((__pyx_v_data) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_data, __pyx_ptype_10bytesparse_2_c_Memory))))) __PYX_ERR(0, 6419, __pyx_L1_error) if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6419, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->insert_same_(__pyx_v_self, __pyx_v_address_, ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_data), ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6419, __pyx_L1_error)
6420:
6421: else:
+6422: if isinstance(data, int):
/*else*/ {
__pyx_t_3 = PyInt_Check(__pyx_v_data);
__pyx_t_2 = (__pyx_t_3 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L4;
}
+6423: data_value = <byte_t>data
__pyx_t_5 = __Pyx_PyInt_As_byte_t(__pyx_v_data); if (unlikely((__pyx_t_5 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6423, __pyx_L1_error) __pyx_v_data_value = ((byte_t)__pyx_t_5);
+6424: data_size = 1
__pyx_v_data_size = 1;
+6425: data_ptr = &data_value
__pyx_v_data_ptr = (&__pyx_v_data_value);
6426: else:
+6427: data_view = data
/*else*/ { __pyx_t_6 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_data, 0); if (unlikely(!__pyx_t_6.memview)) __PYX_ERR(0, 6427, __pyx_L1_error) __pyx_v_data_view = __pyx_t_6; __pyx_t_6.memview = NULL; __pyx_t_6.data = NULL;
+6428: data_size = len(data_view)
__pyx_t_7 = __Pyx_MemoryView_Len(__pyx_v_data_view);
__pyx_v_data_size = __pyx_t_7;
6429: with cython.boundscheck(False):
+6430: data_ptr = &data_view[0]
__pyx_t_8 = 0; if (__pyx_t_8 < 0) __pyx_t_8 += __pyx_v_data_view.shape[0]; __pyx_v_data_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_data_view.data + __pyx_t_8 * __pyx_v_data_view.strides[0]) )))); } __pyx_L4:;
6431:
+6432: self.insert_raw_(address_, data_size, data_ptr, backups)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6432, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->insert_raw_(__pyx_v_self, __pyx_v_address_, __pyx_v_data_size, __pyx_v_data_ptr, ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6432, __pyx_L1_error) } __pyx_L3:;
6433:
+6434: cdef vint delete_(self, addr_t start, addr_t endex, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_delete_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start, addr_t __pyx_v_endex, PyObject *__pyx_v_backups) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("delete_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.delete_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+6435: if start < endex:
__pyx_t_1 = ((__pyx_v_start < __pyx_v_endex) != 0); if (__pyx_t_1) { /* … */ }
+6436: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ }
+6437: backups.append(self.extract_(start, endex, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6437, __pyx_L1_error) } __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6437, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_3); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 6437, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
6438:
+6439: self._erase_(start, endex, True, True) # delete
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 1, 1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 6439, __pyx_L1_error)
6440:
+6441: def delete(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_97delete(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_96delete[] = "Memory.delete(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None, backups: Optional[MemoryList] = None) -> None\nDeletes an address range.\n\n Arguments:\n start (int):\n Inclusive start address for deletion.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address for deletion.\n If ``None``, :attr:`endex` is considered.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the deleted\n items.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+\n | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12| 13|\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | |[A | y | z]| | | | | | |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])\n >>> memory.delete(6, 10)\n >>> memory._blocks\n [[5, b'Ayz']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_97delete(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("delete (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_96delete(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_backups) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("delete", 
0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.delete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6442: self: 'Memory',
+6443: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+6444: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+6445: backups: Optional[MemoryList] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "delete") < 0)) __PYX_ERR(0, 6441, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("delete", 0, 0, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6441, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.delete", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_96delete(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_backups);
6446: ) -> None:
6447: r"""Deletes an address range.
6448:
6449: Arguments:
6450: start (int):
6451: Inclusive start address for deletion.
6452: If ``None``, :attr:`start` is considered.
6453:
6454: endex (int):
6455: Exclusive end address for deletion.
6456: If ``None``, :attr:`endex` is considered.
6457:
6458: backups (list of :obj:`Memory`):
6459: Optional output list holding backup copies of the deleted
6460: items.
6461:
6462: Example:
6463: +---+---+---+---+---+---+---+---+---+---+
6464: | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12| 13|
6465: +===+===+===+===+===+===+===+===+===+===+
6466: | |[A | B | C]| | |[x | y | z]| |
6467: +---+---+---+---+---+---+---+---+---+---+
6468: | |[A | y | z]| | | | | | |
6469: +---+---+---+---+---+---+---+---+---+---+
6470:
6471: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])
6472: >>> memory.delete(6, 10)
6473: >>> memory._blocks
6474: [[5, b'Ayz']]
6475: """
6476: cdef:
6477: addr_t start_
6478: addr_t endex_
6479:
+6480: start_, endex_ = self.bound_(start, endex)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_2 = __pyx_t_1.f0; __pyx_t_3 = __pyx_t_1.f1; __pyx_v_start_ = __pyx_t_2; __pyx_v_endex_ = __pyx_t_3;
+6481: self.delete_(start_, endex_, backups)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6481, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->delete_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6481, __pyx_L1_error)
6482:
+6483: cdef vint clear_(self, addr_t start, addr_t endex, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_clear_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start, addr_t __pyx_v_endex, PyObject *__pyx_v_backups) { __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("clear_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.clear_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
+6484: if start < endex:
__pyx_t_1 = ((__pyx_v_start < __pyx_v_endex) != 0); if (__pyx_t_1) { /* … */ }
+6485: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ }
+6486: backups.append(self.extract_(start, endex, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6486, __pyx_L1_error) } __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6486, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_3); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 6486, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
6487:
+6488: self._erase_(start, endex, False, False) # clear
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, 0); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 6488, __pyx_L1_error)
6489:
+6490: def clear(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_99clear(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_98clear[] = "Memory.clear(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None, backups: Optional[MemoryList] = None) -> None\nClears an address range.\n\n Arguments:\n start (int):\n Inclusive start address for clearing.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address for clearing.\n If ``None``, :attr:`endex` is considered.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the cleared\n items.\n\n Example:\n +---+---+---+---+---+---+---+---+---+\n | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|\n +===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+\n | |[A]| | | | |[y | z]| |\n +---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])\n >>> memory.clear(6, 10)\n >>> memory._blocks\n [[5, b'A'], [10, b'yz']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_99clear(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("clear (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_98clear(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_backups) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("clear", 0); /* … */ /* 
function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6491: self: 'Memory',
+6492: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+6493: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+6494: backups: Optional[MemoryList] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "clear") < 0)) __PYX_ERR(0, 6490, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("clear", 0, 0, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6490, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_98clear(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_backups);
6495: ) -> None:
6496: r"""Clears an address range.
6497:
6498: Arguments:
6499: start (int):
6500: Inclusive start address for clearing.
6501: If ``None``, :attr:`start` is considered.
6502:
6503: endex (int):
6504: Exclusive end address for clearing.
6505: If ``None``, :attr:`endex` is considered.
6506:
6507: backups (list of :obj:`Memory`):
6508: Optional output list holding backup copies of the cleared
6509: items.
6510:
6511: Example:
6512: +---+---+---+---+---+---+---+---+---+
6513: | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|
6514: +===+===+===+===+===+===+===+===+===+
6515: | |[A | B | C]| |[x | y | z]| |
6516: +---+---+---+---+---+---+---+---+---+
6517: | |[A]| | | | |[y | z]| |
6518: +---+---+---+---+---+---+---+---+---+
6519:
6520: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])
6521: >>> memory.clear(6, 10)
6522: >>> memory._blocks
6523: [[5, b'A'], [10, b'yz']]
6524: """
6525: cdef:
6526: addr_t start_
6527: addr_t endex_
6528:
+6529: start_, endex_ = self.bound_(start, endex)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_2 = __pyx_t_1.f0; __pyx_t_3 = __pyx_t_1.f1; __pyx_v_start_ = __pyx_t_2; __pyx_v_endex_ = __pyx_t_3;
+6530: self.clear_(start_, endex_, backups)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6530, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->clear_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6530, __pyx_L1_error)
6531:
+6532: cdef vint _pretrim_start_(self, addr_t endex_max, addr_t size, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory__pretrim_start_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_endex_max, addr_t __pyx_v_size, PyObject *__pyx_v_backups) { addr_t __pyx_v_trim_start; addr_t __pyx_v_endex; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_pretrim_start_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Memory._pretrim_start_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6533: cdef:
6534: addr_t trim_start
6535: addr_t endex
6536:
+6537: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+6538: trim_start = self._trim_start if self._trim_start_ else ADDR_MIN
if ((__pyx_v_self->_trim_start_ != 0)) { __pyx_t_2 = __pyx_v_self->_trim_start; } else { __pyx_t_2 = ADDR_MIN; } __pyx_v_trim_start = __pyx_t_2;
+6539: if CannotAddAddrU(trim_start, size):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddAddrU(__pyx_v_trim_start, __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+6540: endex = ADDR_MAX
__pyx_v_endex = ADDR_MAX;
6541: else:
+6542: endex = trim_start + size
/*else*/ { __pyx_v_endex = (__pyx_v_trim_start + __pyx_v_size); } __pyx_L4:;
6543:
+6544: if endex > endex_max:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_endex_max) != 0); if (__pyx_t_1) { /* … */ }
+6545: endex = endex_max
__pyx_v_endex = __pyx_v_endex_max;
6546:
+6547: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_3 = (__pyx_t_1 != 0); if (__pyx_t_3) { /* … */ }
+6548: backups.append(self.extract(endex=endex))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6548, __pyx_L1_error) } __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_extract); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 6548, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 6548, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_endex); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 6548, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_endex, __pyx_t_6) < 0) __PYX_ERR(0, 6548, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_empty_tuple, __pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 6548, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_6); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 6548, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
6549:
+6550: self._erase_(ADDR_MIN, endex, False, False) # clear
__pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, ADDR_MIN, __pyx_v_endex, 0, 0); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 6550, __pyx_L1_error)
6551:
+6552: def _pretrim_start(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_101_pretrim_start(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_100_pretrim_start[] = "Memory._pretrim_start(self: u'Memory', endex_max: Optional[Address], size: Address, backups: Optional[MemoryList]) -> None\nTrims initial data.\n\n Low-level method to manage trimming of data starting from an address.\n\n Arguments:\n endex_max (int):\n Exclusive end address of the erasure range.\n If ``None``, :attr:`trim_start` plus `size` is considered.\n\n size (int):\n Size of the erasure range.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the cleared\n items.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_101_pretrim_start(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_endex_max = 0; PyObject *__pyx_v_size = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_pretrim_start (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_endex_max,&__pyx_n_s_size,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex_max)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_size)) != 0)) kw_args--; else { 
__Pyx_RaiseArgtupleInvalid("_pretrim_start", 1, 3, 3, 1); __PYX_ERR(0, 6552, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_pretrim_start", 1, 3, 3, 2); __PYX_ERR(0, 6552, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_pretrim_start") < 0)) __PYX_ERR(0, 6552, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[2] = PyTuple_GET_ITEM(__pyx_args, 2); } __pyx_v_endex_max = values[0]; __pyx_v_size = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_pretrim_start", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6552, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory._pretrim_start", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_100_pretrim_start(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_endex_max, __pyx_v_size, __pyx_v_backups); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_100_pretrim_start(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_endex_max, PyObject *__pyx_v_size, PyObject *__pyx_v_backups) { addr_t __pyx_v_endex_max_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_pretrim_start", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; 
__Pyx_AddTraceback("bytesparse._c.Memory._pretrim_start", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6553: self: 'Memory',
6554: endex_max: Optional[Address],
6555: size: Address,
6556: backups: Optional[MemoryList],
6557: ) -> None:
6558: r"""Trims initial data.
6559:
6560: Low-level method to manage trimming of data starting from an address.
6561:
6562: Arguments:
6563: endex_max (int):
6564: Exclusive end address of the erasure range.
6565: If ``None``, :attr:`trim_start` plus `size` is considered.
6566:
6567: size (int):
6568: Size of the erasure range.
6569:
6570: backups (list of :obj:`Memory`):
6571: Optional output list holding backup copies of the cleared
6572: items.
6573: """
6574: cdef:
+6575: addr_t endex_max_ = ADDR_MAX if endex_max is None else <addr_t>endex_max
__pyx_t_2 = (__pyx_v_endex_max == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_1 = ADDR_MAX; } else { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_endex_max); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6575, __pyx_L1_error) __pyx_t_1 = ((addr_t)__pyx_t_3); } __pyx_v_endex_max_ = __pyx_t_1;
6576:
+6577: self._pretrim_start_(endex_max_, <addr_t>size, backups)
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_size); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6577, __pyx_L1_error) if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6577, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_pretrim_start_(__pyx_v_self, __pyx_v_endex_max_, ((addr_t)__pyx_t_1), ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6577, __pyx_L1_error)
6578:
+6579: cdef vint _pretrim_endex_(self, addr_t start_min, addr_t size, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory__pretrim_endex_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start_min, addr_t __pyx_v_size, PyObject *__pyx_v_backups) { addr_t __pyx_v_trim_endex; addr_t __pyx_v_start; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_pretrim_endex_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("bytesparse._c.Memory._pretrim_endex_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6580: cdef:
6581: addr_t trim_endex
6582: addr_t start
6583:
+6584: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+6585: trim_endex = self._trim_endex if self._trim_endex_ else ADDR_MAX
if ((__pyx_v_self->_trim_endex_ != 0)) { __pyx_t_2 = __pyx_v_self->_trim_endex; } else { __pyx_t_2 = ADDR_MAX; } __pyx_v_trim_endex = __pyx_t_2;
+6586: if CannotSubAddrU(trim_endex, size):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotSubAddrU(__pyx_v_trim_endex, __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+6587: start = ADDR_MIN
__pyx_v_start = ADDR_MIN;
6588: else:
+6589: start = trim_endex - size
/*else*/ { __pyx_v_start = (__pyx_v_trim_endex - __pyx_v_size); } __pyx_L4:;
6590:
+6591: if start < start_min:
__pyx_t_1 = ((__pyx_v_start < __pyx_v_start_min) != 0); if (__pyx_t_1) { /* … */ }
+6592: start = start_min
__pyx_v_start = __pyx_v_start_min;
6593:
+6594: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_3 = (__pyx_t_1 != 0); if (__pyx_t_3) { /* … */ }
+6595: backups.append(self.extract(start=start))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6595, __pyx_L1_error) } __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_extract); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 6595, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 6595, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_start); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 6595, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_start, __pyx_t_6) < 0) __PYX_ERR(0, 6595, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_empty_tuple, __pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 6595, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_t_7 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_6); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 6595, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
6596:
+6597: self._erase_(start, ADDR_MAX, False, False) # clear
__pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_start, ADDR_MAX, 0, 0); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 6597, __pyx_L1_error)
6598:
+6599: def _pretrim_endex(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_103_pretrim_endex(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_102_pretrim_endex[] = "Memory._pretrim_endex(self: u'Memory', start_min: Optional[Address], size: Address, backups: Optional[MemoryList]) -> None\nTrims final data.\n\n Low-level method to manage trimming of data starting from an address.\n\n Arguments:\n start_min (int):\n Starting address of the erasure range.\n If ``None``, :attr:`trim_endex` minus `size` is considered.\n\n size (int):\n Size of the erasure range.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the cleared\n items.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_103_pretrim_endex(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start_min = 0; PyObject *__pyx_v_size = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_pretrim_endex (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start_min,&__pyx_n_s_size,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start_min)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_size)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_pretrim_endex", 1, 
3, 3, 1); __PYX_ERR(0, 6599, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_pretrim_endex", 1, 3, 3, 2); __PYX_ERR(0, 6599, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_pretrim_endex") < 0)) __PYX_ERR(0, 6599, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[2] = PyTuple_GET_ITEM(__pyx_args, 2); } __pyx_v_start_min = values[0]; __pyx_v_size = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_pretrim_endex", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6599, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory._pretrim_endex", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_102_pretrim_endex(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start_min, __pyx_v_size, __pyx_v_backups); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_102_pretrim_endex(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start_min, PyObject *__pyx_v_size, PyObject *__pyx_v_backups) { addr_t __pyx_v_start_min_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_pretrim_endex", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory._pretrim_endex", __pyx_clineno, 
__pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6600: self: 'Memory',
6601: start_min: Optional[Address],
6602: size: Address,
6603: backups: Optional[MemoryList],
6604: ) -> None:
6605: r"""Trims final data.
6606:
6607: Low-level method to manage trimming of data starting from an address.
6608:
6609: Arguments:
6610: start_min (int):
6611: Starting address of the erasure range.
6612: If ``None``, :attr:`trim_endex` minus `size` is considered.
6613:
6614: size (int):
6615: Size of the erasure range.
6616:
6617: backups (list of :obj:`Memory`):
6618: Optional output list holding backup copies of the cleared
6619: items.
6620: """
6621: cdef:
+6622: addr_t start_min_ = ADDR_MIN if start_min is None else <addr_t>start_min
__pyx_t_2 = (__pyx_v_start_min == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_1 = ADDR_MIN; } else { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_start_min); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6622, __pyx_L1_error) __pyx_t_1 = ((addr_t)__pyx_t_3); } __pyx_v_start_min_ = __pyx_t_1;
6623:
+6624: self._pretrim_endex_(start_min_, <addr_t>size, backups)
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_size); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6624, __pyx_L1_error) if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6624, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_pretrim_endex_(__pyx_v_self, __pyx_v_start_min_, ((addr_t)__pyx_t_1), ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6624, __pyx_L1_error)
6625:
+6626: cdef vint _crop_(self, addr_t start, addr_t endex, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory__crop_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start, addr_t __pyx_v_endex, PyObject *__pyx_v_backups) { addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_crop_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory._crop_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6627: cdef:
6628: addr_t block_start
6629: addr_t block_endex
6630:
6631: # Trim blocks exceeding before memory start
+6632: if Rack_Length(self._):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) != 0); if (__pyx_t_1) { /* … */ }
+6633: block_start = Block_Start(Rack_First_(self._))
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_f_10bytesparse_2_c_Rack_First_(__pyx_v_self->_));
6634:
+6635: if block_start < start:
__pyx_t_1 = ((__pyx_v_block_start < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+6636: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ }
+6637: backups.append(self.extract_(block_start, start, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6637, __pyx_L1_error) } __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_block_start, __pyx_v_start, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6637, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_3); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 6637, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
6638:
+6639: self._erase_(block_start, start, False, False) # clear
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_block_start, __pyx_v_start, 0, 0); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 6639, __pyx_L1_error)
6640:
6641: # Trim blocks exceeding after memory end
+6642: if Rack_Length(self._):
__pyx_t_2 = (__pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_self->_) != 0); if (__pyx_t_2) { /* … */ }
+6643: block_endex = Block_Endex(Rack_Last_(self._))
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_f_10bytesparse_2_c_Rack_Last_(__pyx_v_self->_));
6644:
+6645: if endex < block_endex:
__pyx_t_2 = ((__pyx_v_endex < __pyx_v_block_endex) != 0); if (__pyx_t_2) { /* … */ }
+6646: if backups is not None:
__pyx_t_2 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_1 = (__pyx_t_2 != 0); if (__pyx_t_1) { /* … */ }
+6647: backups.append(self.extract_(endex, block_endex, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6647, __pyx_L1_error) } __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_endex, __pyx_v_block_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6647, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_3); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 6647, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
6648:
+6649: self._erase_(endex, block_endex, False, False) # clear
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_endex, __pyx_v_block_endex, 0, 0); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 6649, __pyx_L1_error)
6650:
+6651: def _crop(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_105_crop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_104_crop[] = "Memory._crop(self: u'Memory', start: Address, endex: Address, backups: Optional[MemoryList] = None) -> None\nKeeps data within an address range.\n\n Low-level method to crop the underlying data structure.\n\n Arguments:\n start (int):\n Inclusive start address for cropping.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address for cropping.\n If ``None``, :attr:`endex` is considered.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the cleared\n items.\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_105_crop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_crop (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_104_crop(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_backups) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_crop", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory._crop", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6652: self: 'Memory',
6653: start: Address,
6654: endex: Address,
+6655: backups: Optional[MemoryList] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_crop", 0, 2, 3, 1); __PYX_ERR(0, 6651, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_crop") < 0)) __PYX_ERR(0, 6651, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_crop", 0, 2, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6651, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory._crop", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_104_crop(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_backups);
6656: ) -> None:
6657: r"""Keeps data within an address range.
6658:
6659: Low-level method to crop the underlying data structure.
6660:
6661: Arguments:
6662: start (int):
6663: Inclusive start address for cropping.
6664: If ``None``, :attr:`start` is considered.
6665:
6666: endex (int):
6667: Exclusive end address for cropping.
6668: If ``None``, :attr:`endex` is considered.
6669:
6670: backups (list of :obj:`Memory`):
6671: Optional output list holding backup copies of the cleared
6672: items.
6673: """
6674: cdef:
6675: addr_t start_
6676: addr_t endex_
6677:
+6678: start_, endex_ = self.bound_(start, endex)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_2 = __pyx_t_1.f0; __pyx_t_3 = __pyx_t_1.f1; __pyx_v_start_ = __pyx_t_2; __pyx_v_endex_ = __pyx_t_3;
+6679: self._crop_(start_, endex_, backups)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6679, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6679, __pyx_L1_error)
6680:
+6681: def crop(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_107crop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_106crop[] = "Memory.crop(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None, backups: Optional[MemoryList] = None) -> None\nKeeps data within an address range.\n\n Arguments:\n start (int):\n Inclusive start address for cropping.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address for cropping.\n If ``None``, :attr:`endex` is considered.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the cleared\n items.\n\n Example:\n +---+---+---+---+---+---+---+---+---+\n | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|\n +===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+\n | | |[B | C]| |[x]| | | |\n +---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])\n >>> memory.crop(6, 10)\n >>> memory._blocks\n [[6, b'BC'], [9, b'x']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_107crop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("crop (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_backups,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_106crop(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_backups) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("crop", 0); /* … */ /* 
function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.crop", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6682: self: 'Memory',
+6683: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+6684: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+6685: backups: Optional[MemoryList] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "crop") < 0)) __PYX_ERR(0, 6681, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_backups = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("crop", 0, 0, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6681, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.crop", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_106crop(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_backups);
6686: ) -> None:
6687: r"""Keeps data within an address range.
6688:
6689: Arguments:
6690: start (int):
6691: Inclusive start address for cropping.
6692: If ``None``, :attr:`start` is considered.
6693:
6694: endex (int):
6695: Exclusive end address for cropping.
6696: If ``None``, :attr:`endex` is considered.
6697:
6698: backups (list of :obj:`Memory`):
6699: Optional output list holding backup copies of the cleared
6700: items.
6701:
6702: Example:
6703: +---+---+---+---+---+---+---+---+---+
6704: | 4 | 5 | 6 | 7 | 8 | 9 | 10| 11| 12|
6705: +===+===+===+===+===+===+===+===+===+
6706: | |[A | B | C]| |[x | y | z]| |
6707: +---+---+---+---+---+---+---+---+---+
6708: | | |[B | C]| |[x]| | | |
6709: +---+---+---+---+---+---+---+---+---+
6710:
6711: >>> memory = Memory(blocks=[[5, b'ABC'], [9, b'xyz']])
6712: >>> memory.crop(6, 10)
6713: >>> memory._blocks
6714: [[6, b'BC'], [9, b'x']]
6715: """
6716: cdef:
6717: addr_t start_
6718: addr_t endex_
6719:
+6720: start_, endex_ = self.bound_(start, endex)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_2 = __pyx_t_1.f0; __pyx_t_3 = __pyx_t_1.f1; __pyx_v_start_ = __pyx_t_2; __pyx_v_endex_ = __pyx_t_3;
+6721: self._crop_(start_, endex_, backups)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6721, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6721, __pyx_L1_error)
6722:
+6723: cdef vint write_same_(self, addr_t address, Memory data, bint clear, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_write_same_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address, struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_data, int __pyx_v_clear, PyObject *__pyx_v_backups) { addr_t __pyx_v_data_start; addr_t __pyx_v_data_endex; addr_t __pyx_v_size; Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("write_same_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("bytesparse._c.Memory.write_same_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6724: cdef:
6725: addr_t data_start
6726: addr_t data_endex
6727: addr_t size
6728: const Rack_* blocks
6729: size_t block_index
6730: const Block_* block
6731: addr_t block_start
6732: addr_t block_endex
6733:
+6734: data_start = data.start_()
__pyx_v_data_start = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_data->__pyx_vtab)->start_(__pyx_v_data);
+6735: data_endex = data.endex_()
__pyx_v_data_endex = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_data->__pyx_vtab)->endex_(__pyx_v_data);
+6736: size = data_endex - data_start
__pyx_v_size = (__pyx_v_data_endex - __pyx_v_data_start);
+6737: blocks = data._
__pyx_t_1 = __pyx_v_data->_; __pyx_v_blocks = __pyx_t_1;
6738:
+6739: if size:
__pyx_t_2 = (__pyx_v_size != 0); if (__pyx_t_2) { /* … */ }
+6740: if clear:
__pyx_t_2 = (__pyx_v_clear != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
6741: # Clear anything between source data boundaries
+6742: if backups is not None:
__pyx_t_2 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_3 = (__pyx_t_2 != 0); if (__pyx_t_3) { /* … */ }
+6743: backups.append(self.extract_(data_start, data_endex, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6743, __pyx_L1_error) } __pyx_t_4 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_data_start, __pyx_v_data_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 6743, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_4); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 6743, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
6744:
+6745: self._erase_(data_start, data_endex, False, True) # insert
__pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_data_start, __pyx_v_data_endex, 0, 1); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 6745, __pyx_L1_error)
6746:
6747: else:
6748: # Clear only overwritten ranges
+6749: for block_index in range(Rack_Length(blocks)):
/*else*/ { __pyx_t_6 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_7 = __pyx_t_6; for (__pyx_t_8 = 0; __pyx_t_8 < __pyx_t_7; __pyx_t_8+=1) { __pyx_v_block_index = __pyx_t_8;
+6750: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
6751:
+6752: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+6753: CheckAddAddrU(block_start, address)
__pyx_t_9 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block_start, __pyx_v_address); if (unlikely(__pyx_t_9 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6753, __pyx_L1_error)
+6754: block_start += address
__pyx_v_block_start = (__pyx_v_block_start + __pyx_v_address);
6755:
+6756: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+6757: CheckAddAddrU(block_endex, address)
__pyx_t_9 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block_endex, __pyx_v_address); if (unlikely(__pyx_t_9 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6757, __pyx_L1_error)
+6758: block_endex += address
__pyx_v_block_endex = (__pyx_v_block_endex + __pyx_v_address);
6759:
+6760: if backups is not None:
__pyx_t_3 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_2 = (__pyx_t_3 != 0); if (__pyx_t_2) { /* … */ }
+6761: backups.append(self.extract_(block_start, block_endex, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6761, __pyx_L1_error) } __pyx_t_4 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_block_start, __pyx_v_block_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 6761, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_4); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 6761, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
6762:
+6763: self._erase_(block_start, block_endex, False, True) # insert
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_block_start, __pyx_v_block_endex, 0, 1); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 6763, __pyx_L1_error)
}
}
__pyx_L4:;
6764:
+6765: for block_index in range(Rack_Length(blocks)):
__pyx_t_6 = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks); __pyx_t_7 = __pyx_t_6; for (__pyx_t_8 = 0; __pyx_t_8 < __pyx_t_7; __pyx_t_8+=1) { __pyx_v_block_index = __pyx_t_8;
+6766: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+6767: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+6768: CheckAddAddrU(block_start, address)
__pyx_t_9 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_block_start, __pyx_v_address); if (unlikely(__pyx_t_9 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6768, __pyx_L1_error)
+6769: self._insert_(block_start + address, Block_Length(block), Block_At__(block, 0), False)
__pyx_t_2 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_insert_(__pyx_v_self, (__pyx_v_block_start + __pyx_v_address), __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block), __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_block, 0), 0); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 6769, __pyx_L1_error)
}
6770:
+6771: self._crop_(self._trim_start, self._trim_endex, None) # FIXME: prevent after-cropping; trim while writing
__pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_crop_(__pyx_v_self, __pyx_v_self->_trim_start, __pyx_v_self->_trim_endex, ((PyObject*)Py_None)); if (unlikely(__pyx_t_9 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6771, __pyx_L1_error)
6772:
+6773: cdef vint write_raw_(self, addr_t address, size_t data_size, const byte_t* data_ptr, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_write_raw_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_address, size_t __pyx_v_data_size, byte_t const *__pyx_v_data_ptr, PyObject *__pyx_v_backups) { addr_t __pyx_v_size; addr_t __pyx_v_start; addr_t __pyx_v_endex; addr_t __pyx_v_trim_start; addr_t __pyx_v_trim_endex; addr_t __pyx_v_offset; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("write_raw_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("bytesparse._c.Memory.write_raw_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6774: cdef:
+6775: addr_t size = data_size
__pyx_v_size = __pyx_v_data_size;
6776: addr_t start
6777: addr_t endex
6778: addr_t trim_start
6779: addr_t trim_endex
6780: addr_t offset
6781:
+6782: if CannotAddAddrU(address, size):
__pyx_t_1 = (__pyx_f_10bytesparse_2_c_CannotAddAddrU(__pyx_v_address, __pyx_v_size) != 0); if (__pyx_t_1) { /* … */ }
+6783: size = ADDR_MAX - address
__pyx_v_size = (ADDR_MAX - __pyx_v_address);
6784:
+6785: if size:
__pyx_t_1 = (__pyx_v_size != 0); if (__pyx_t_1) { /* … */ }
+6786: start = address
__pyx_v_start = __pyx_v_address;
+6787: endex = start + size
__pyx_v_endex = (__pyx_v_start + __pyx_v_size);
6788:
+6789: trim_endex = self._trim_endex if self._trim_endex_ else ADDR_MAX
if ((__pyx_v_self->_trim_endex_ != 0)) { __pyx_t_2 = __pyx_v_self->_trim_endex; } else { __pyx_t_2 = ADDR_MAX; } __pyx_v_trim_endex = __pyx_t_2;
+6790: if start >= trim_endex:
__pyx_t_1 = ((__pyx_v_start >= __pyx_v_trim_endex) != 0); if (__pyx_t_1) { /* … */ }
+6791: return 0
__pyx_r = 0; goto __pyx_L0;
+6792: elif endex > trim_endex:
__pyx_t_1 = ((__pyx_v_endex > __pyx_v_trim_endex) != 0); if (__pyx_t_1) { /* … */ }
+6793: size -= endex - trim_endex
__pyx_v_size = (__pyx_v_size - (__pyx_v_endex - __pyx_v_trim_endex));
+6794: endex = start + size
__pyx_v_endex = (__pyx_v_start + __pyx_v_size);
6795:
+6796: trim_start = self._trim_start if self._trim_start_ else ADDR_MIN
if ((__pyx_v_self->_trim_start_ != 0)) { __pyx_t_2 = __pyx_v_self->_trim_start; } else { __pyx_t_2 = ADDR_MIN; } __pyx_v_trim_start = __pyx_t_2;
+6797: if endex <= trim_start:
__pyx_t_1 = ((__pyx_v_endex <= __pyx_v_trim_start) != 0); if (__pyx_t_1) { /* … */ }
+6798: return 0
__pyx_r = 0; goto __pyx_L0;
+6799: elif trim_start > start:
__pyx_t_1 = ((__pyx_v_trim_start > __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+6800: offset = trim_start - start
__pyx_v_offset = (__pyx_v_trim_start - __pyx_v_start);
+6801: size -= offset
__pyx_v_size = (__pyx_v_size - __pyx_v_offset);
+6802: start += offset
__pyx_v_start = (__pyx_v_start + __pyx_v_offset);
+6803: endex = start + size
__pyx_v_endex = (__pyx_v_start + __pyx_v_size);
+6804: data_ptr += offset
__pyx_v_data_ptr = (__pyx_v_data_ptr + __pyx_v_offset);
6805:
+6806: CheckAddrToSizeU(size)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_size); if (unlikely(__pyx_t_3 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6806, __pyx_L1_error)
+6807: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_4 = (__pyx_t_1 != 0); if (__pyx_t_4) { /* … */ }
+6808: backups.append(self.extract_(start, endex, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6808, __pyx_L1_error) } __pyx_t_5 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 6808, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_5); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 6808, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
6809:
+6810: if size == 1:
__pyx_t_4 = ((__pyx_v_size == 1) != 0); if (__pyx_t_4) { /* … */ goto __pyx_L8; }
+6811: self.poke_(start, data_ptr[0]) # might be faster
__pyx_t_7 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->poke_(__pyx_v_self, __pyx_v_start, (__pyx_v_data_ptr[0])); if (unlikely(__pyx_t_7 == ((int)-2))) __PYX_ERR(0, 6811, __pyx_L1_error)
6812: else:
+6813: self._erase_(start, endex, False, True) # insert
/*else*/ {
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, 1); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 6813, __pyx_L1_error)
+6814: self._insert_(start, <size_t>size, data_ptr, False)
__pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_insert_(__pyx_v_self, __pyx_v_start, ((size_t)__pyx_v_size), __pyx_v_data_ptr, 0); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 6814, __pyx_L1_error)
}
__pyx_L8:;
6815:
+6816: def write(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_109write(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_108write[] = "Memory.write(self: u'Memory', address: Address, data: Union[AnyBytes, Value, u'Memory'], clear: bool = False, backups: Optional[MemoryList] = None) -> None\nWrites data.\n\n Arguments:\n address (int):\n Address where to start writing data.\n\n data (bytes):\n Data to write.\n\n clear (bool):\n Clears the target range before writing data.\n Useful only if `data` is a :obj:`Memory` with empty spaces.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the deleted\n items.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C]| |[1 | 2 | 3 | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> memory.write(5, b'123')\n >>> memory._blocks\n [[1, b'ABC'], [5, b'123z']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_109write(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_address = 0; PyObject *__pyx_v_data = 0; PyObject *__pyx_v_clear = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("write (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_address,&__pyx_n_s_data,&__pyx_n_s_clear,&__pyx_n_s_backups,0}; PyObject* values[4] = {0,0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_108write(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address, PyObject *__pyx_v_data, PyObject *__pyx_v_clear, PyObject *__pyx_v_backups) { addr_t 
__pyx_v_address_; __Pyx_memviewslice __pyx_v_data_view = { 0, 0, { 0 }, { 0 }, { 0 } }; byte_t __pyx_v_data_value; size_t __pyx_v_data_size; byte_t const *__pyx_v_data_ptr; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("write", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __PYX_XDEC_MEMVIEW(&__pyx_t_6, 1); __Pyx_AddTraceback("bytesparse._c.Memory.write", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_data_view, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6817: self: 'Memory',
6818: address: Address,
6819: data: Union[AnyBytes, Value, 'Memory'],
+6820: clear: bool = False,
values[2] = ((PyObject *)Py_False);
+6821: backups: Optional[MemoryList] = None,
values[3] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_address)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("write", 0, 2, 4, 1); __PYX_ERR(0, 6816, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_clear); if (value) { values[2] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[3] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "write") < 0)) __PYX_ERR(0, 6816, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_address = values[0]; __pyx_v_data = values[1]; __pyx_v_clear = values[2]; __pyx_v_backups = values[3]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("write", 0, 2, 4, 
PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6816, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.write", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_108write(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_address, __pyx_v_data, __pyx_v_clear, __pyx_v_backups);
6822: ) -> None:
6823: r"""Writes data.
6824:
6825: Arguments:
6826: address (int):
6827: Address where to start writing data.
6828:
6829: data (bytes):
6830: Data to write.
6831:
6832: clear (bool):
6833: Clears the target range before writing data.
6834: Useful only if `data` is a :obj:`Memory` with empty spaces.
6835:
6836: backups (list of :obj:`Memory`):
6837: Optional output list holding backup copies of the deleted
6838: items.
6839:
6840: Example:
6841: +---+---+---+---+---+---+---+---+---+---+
6842: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
6843: +===+===+===+===+===+===+===+===+===+===+
6844: | |[A | B | C]| | |[x | y | z]| |
6845: +---+---+---+---+---+---+---+---+---+---+
6846: | |[A | B | C]| |[1 | 2 | 3 | z]| |
6847: +---+---+---+---+---+---+---+---+---+---+
6848:
6849: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
6850: >>> memory.write(5, b'123')
6851: >>> memory._blocks
6852: [[1, b'ABC'], [5, b'123z']]
6853: """
6854: cdef:
+6855: addr_t address_ = <addr_t>address
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6855, __pyx_L1_error) __pyx_v_address_ = ((addr_t)__pyx_t_1);
6856: const byte_t[:] data_view
6857: byte_t data_value
6858: size_t data_size
6859: const byte_t* data_ptr
6860:
+6861: if isinstance(data, Memory):
__pyx_t_2 = __Pyx_TypeCheck(__pyx_v_data, __pyx_ptype_10bytesparse_2_c_Memory);
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {
/* … */
goto __pyx_L3;
}
+6862: self.write_same_(address_, data, <bint>clear, backups)
if (!(likely(((__pyx_v_data) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_data, __pyx_ptype_10bytesparse_2_c_Memory))))) __PYX_ERR(0, 6862, __pyx_L1_error) __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_clear); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 6862, __pyx_L1_error) if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6862, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_same_(__pyx_v_self, __pyx_v_address_, ((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_data), __pyx_t_3, ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6862, __pyx_L1_error)
6863:
6864: else:
+6865: if isinstance(data, int):
/*else*/ {
__pyx_t_3 = PyInt_Check(__pyx_v_data);
__pyx_t_2 = (__pyx_t_3 != 0);
if (__pyx_t_2) {
/* … */
goto __pyx_L4;
}
+6866: data_value = <byte_t>data
__pyx_t_5 = __Pyx_PyInt_As_byte_t(__pyx_v_data); if (unlikely((__pyx_t_5 == ((byte_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6866, __pyx_L1_error) __pyx_v_data_value = ((byte_t)__pyx_t_5);
+6867: data_size = 1
__pyx_v_data_size = 1;
+6868: data_ptr = &data_value
__pyx_v_data_ptr = (&__pyx_v_data_value);
6869: else:
+6870: data_view = data
/*else*/ { __pyx_t_6 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(__pyx_v_data, 0); if (unlikely(!__pyx_t_6.memview)) __PYX_ERR(0, 6870, __pyx_L1_error) __pyx_v_data_view = __pyx_t_6; __pyx_t_6.memview = NULL; __pyx_t_6.data = NULL;
+6871: data_size = len(data_view)
__pyx_t_7 = __Pyx_MemoryView_Len(__pyx_v_data_view);
__pyx_v_data_size = __pyx_t_7;
6872: with cython.boundscheck(False):
+6873: data_ptr = &data_view[0]
__pyx_t_8 = 0; if (__pyx_t_8 < 0) __pyx_t_8 += __pyx_v_data_view.shape[0]; __pyx_v_data_ptr = (&(*((byte_t const *) ( /* dim=0 */ (__pyx_v_data_view.data + __pyx_t_8 * __pyx_v_data_view.strides[0]) )))); } __pyx_L4:;
6874:
+6875: self.write_raw_(address_, data_size, data_ptr, backups)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6875, __pyx_L1_error) __pyx_t_4 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->write_raw_(__pyx_v_self, __pyx_v_address_, __pyx_v_data_size, __pyx_v_data_ptr, ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_4 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6875, __pyx_L1_error) } __pyx_L3:;
6876:
+6877: cdef vint fill_(self, addr_t start, addr_t endex, Block_** pattern, list backups, addr_t start_) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_fill_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start, addr_t __pyx_v_endex, Block_ **__pyx_v_pattern, PyObject *__pyx_v_backups, addr_t __pyx_v_start_) { size_t __pyx_v_offset; size_t __pyx_v_size; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("fill_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory.fill_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; }
6878: cdef:
6879: size_t offset
6880: size_t size
6881:
+6882: if start < endex:
__pyx_t_1 = ((__pyx_v_start < __pyx_v_endex) != 0); if (__pyx_t_1) { /* … */ }
+6883: CheckAddrToSizeU(endex - start)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU((__pyx_v_endex - __pyx_v_start)); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6883, __pyx_L1_error)
+6884: if not Block_Length(pattern[0]):
__pyx_t_1 = ((!(__pyx_f_10bytesparse_2_c_Block_Length((__pyx_v_pattern[0])) != 0)) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+6885: raise ValueError('non-empty pattern required')
__pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6885, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 6885, __pyx_L1_error)
6886:
+6887: if start > start_:
__pyx_t_1 = ((__pyx_v_start > __pyx_v_start_) != 0); if (__pyx_t_1) { /* … */ }
+6888: offset = start - start_
__pyx_v_offset = (__pyx_v_start - __pyx_v_start_);
+6889: CheckAddrToSizeU(offset)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_offset); if (unlikely(__pyx_t_2 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6889, __pyx_L1_error)
+6890: Block_RotateLeft_(pattern[0], <size_t>offset)
__pyx_f_10bytesparse_2_c_Block_RotateLeft_((__pyx_v_pattern[0]), ((size_t)__pyx_v_offset));
6891:
6892: # Resize the pattern to the target range
+6893: size = <size_t>(endex - start)
__pyx_v_size = ((size_t)(__pyx_v_endex - __pyx_v_start));
+6894: pattern[0] = Block_RepeatToSize(pattern[0], size)
__pyx_t_4 = __pyx_f_10bytesparse_2_c_Block_RepeatToSize((__pyx_v_pattern[0]), __pyx_v_size); if (unlikely(__pyx_t_4 == ((Block_ *)NULL))) __PYX_ERR(0, 6894, __pyx_L1_error)
(__pyx_v_pattern[0]) = __pyx_t_4;
6895:
+6896: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_5 = (__pyx_t_1 != 0); if (__pyx_t_5) { /* … */ }
+6897: backups.append(self.extract_(start, endex, 0, NULL, 1, True))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 6897, __pyx_L1_error) } __pyx_t_3 = ((PyObject *)((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->extract_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, NULL, 1, 1)); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 6897, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_6 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_3); if (unlikely(__pyx_t_6 == ((int)-1))) __PYX_ERR(0, 6897, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
6898:
6899: # Standard write method
+6900: self._erase_(start, endex, False, True) # insert
__pyx_t_5 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_erase_(__pyx_v_self, __pyx_v_start, __pyx_v_endex, 0, 1); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 6900, __pyx_L1_error)
+6901: self._insert_(start, size, Block_At__(pattern[0], 0), False)
__pyx_t_5 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->_insert_(__pyx_v_self, __pyx_v_start, __pyx_v_size, __pyx_f_10bytesparse_2_c_Block_At__((__pyx_v_pattern[0]), 0), 0); if (unlikely(__pyx_t_5 == ((int)-1))) __PYX_ERR(0, 6901, __pyx_L1_error)
6902:
+6903: def fill(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_111fill(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_110fill[] = "Memory.fill(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None, pattern: Union[AnyBytes, Value] = 0, backups: Optional[MemoryList] = None) -> None\nOverwrites a range with a pattern.\n\n Arguments:\n start (int):\n Inclusive start address for filling.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address for filling.\n If ``None``, :attr:`endex` is considered.\n\n pattern (items):\n Pattern of items to fill the range.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the deleted\n items, before trimming.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | |[1 | 2 | 3 | 1 | 2 | 3 | 1 | 2]| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> memory.fill(pattern=b'123')\n >>> memory._blocks\n [[1, b'12312312']]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | |[A | B | 1 | 2 | 3 | 1 | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> memory.fill(3, 7, b'123')\n >>> memory._blocks\n [[1, b'AB1231yz']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_111fill(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_pattern = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r = 0; 
__Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("fill (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_pattern,&__pyx_n_s_backups,0}; PyObject* values[4] = {0,0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_110fill(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_pattern, PyObject *__pyx_v_backups) { addr_t __pyx_v_start__; addr_t __pyx_v_start_; addr_t __pyx_v_endex_; Block_ *__pyx_v_pattern_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("fill", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.fill", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6904: self: 'Memory',
+6905: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+6906: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None); values[2] = ((PyObject *)__pyx_int_0);
6907: pattern: Union[AnyBytes, Value] = 0,
+6908: backups: Optional[MemoryList] = None,
values[3] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pattern); if (value) { values[2] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[3] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "fill") < 0)) __PYX_ERR(0, 6903, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_pattern = values[2]; __pyx_v_backups = values[3]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("fill", 
0, 0, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 6903, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.fill", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_110fill(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_pattern, __pyx_v_backups);
6909: ) -> None:
6910: r"""Overwrites a range with a pattern.
6911:
6912: Arguments:
6913: start (int):
6914: Inclusive start address for filling.
6915: If ``None``, :attr:`start` is considered.
6916:
6917: endex (int):
6918: Exclusive end address for filling.
6919: If ``None``, :attr:`endex` is considered.
6920:
6921: pattern (items):
6922: Pattern of items to fill the range.
6923:
6924: backups (list of :obj:`Memory`):
6925: Optional output list holding backup copies of the deleted
6926: items, before trimming.
6927:
6928: Examples:
6929: +---+---+---+---+---+---+---+---+---+---+
6930: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
6931: +===+===+===+===+===+===+===+===+===+===+
6932: | |[A | B | C]| | |[x | y | z]| |
6933: +---+---+---+---+---+---+---+---+---+---+
6934: | |[1 | 2 | 3 | 1 | 2 | 3 | 1 | 2]| |
6935: +---+---+---+---+---+---+---+---+---+---+
6936:
6937: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
6938: >>> memory.fill(pattern=b'123')
6939: >>> memory._blocks
6940: [[1, b'12312312']]
6941:
6942: ~~~
6943:
6944: +---+---+---+---+---+---+---+---+---+---+
6945: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
6946: +===+===+===+===+===+===+===+===+===+===+
6947: | |[A | B | C]| | |[x | y | z]| |
6948: +---+---+---+---+---+---+---+---+---+---+
6949: | |[A | B | 1 | 2 | 3 | 1 | y | z]| |
6950: +---+---+---+---+---+---+---+---+---+---+
6951:
6952: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
6953: >>> memory.fill(3, 7, b'123')
6954: >>> memory._blocks
6955: [[1, b'AB1231yz']]
6956: """
6957: cdef:
6958: addr_t start__
6959: addr_t start_
6960: addr_t endex_
+6961: Block_* pattern_ = NULL
__pyx_v_pattern_ = NULL;
6962:
+6963: start_, endex_ = self.bound_(start, endex)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_2 = __pyx_t_1.f0; __pyx_t_3 = __pyx_t_1.f1; __pyx_v_start_ = __pyx_t_2; __pyx_v_endex_ = __pyx_t_3;
+6964: if start_ < endex_:
__pyx_t_4 = ((__pyx_v_start_ < __pyx_v_endex_) != 0); if (__pyx_t_4) { /* … */ }
+6965: pattern_ = Block_FromObject(0, pattern, False) # size checked later on
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_FromObject(0, __pyx_v_pattern, 0); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 6965, __pyx_L1_error)
__pyx_v_pattern_ = __pyx_t_5;
+6966: try:
/*try:*/ {
+6967: start__ = self.start_() if start is None else <addr_t>start
__pyx_t_4 = (__pyx_v_start == Py_None); if ((__pyx_t_4 != 0)) { __pyx_t_3 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->start_(__pyx_v_self); } else { __pyx_t_2 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_start); if (unlikely((__pyx_t_2 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 6967, __pyx_L5_error) __pyx_t_3 = ((addr_t)__pyx_t_2); } __pyx_v_start__ = __pyx_t_3;
+6968: self.fill_(start_, endex_, &pattern_, backups, start__)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 6968, __pyx_L5_error) __pyx_t_6 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->fill_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, (&__pyx_v_pattern_), ((PyObject*)__pyx_v_backups), __pyx_v_start__); if (unlikely(__pyx_t_6 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 6968, __pyx_L5_error) }
6969: finally:
+6970: Block_Free(pattern_) # orphan
/*finally:*/ { /*normal exit:*/{ (void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_pattern_)); goto __pyx_L6; } __pyx_L5_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0)) __Pyx_ErrFetch(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); __Pyx_XGOTREF(__pyx_t_10); __Pyx_XGOTREF(__pyx_t_11); __Pyx_XGOTREF(__pyx_t_12); __Pyx_XGOTREF(__pyx_t_13); __Pyx_XGOTREF(__pyx_t_14); __Pyx_XGOTREF(__pyx_t_15); __pyx_t_7 = __pyx_lineno; __pyx_t_8 = __pyx_clineno; __pyx_t_9 = __pyx_filename; { (void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_pattern_)); } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_XGIVEREF(__pyx_t_15); __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); } __Pyx_XGIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_11); __Pyx_XGIVEREF(__pyx_t_12); __Pyx_ErrRestore(__pyx_t_10, __pyx_t_11, __pyx_t_12); __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_lineno = __pyx_t_7; __pyx_clineno = __pyx_t_8; __pyx_filename = __pyx_t_9; goto __pyx_L1_error; } __pyx_L6:; }
6971:
+6972: cdef vint flood_(self, addr_t start, addr_t endex, Block_** pattern, list backups) except -1:
static __pyx_t_10bytesparse_2_c_vint __pyx_f_10bytesparse_2_c_6Memory_flood_(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, addr_t __pyx_v_start, addr_t __pyx_v_endex, Block_ **__pyx_v_pattern, PyObject *__pyx_v_backups) { Rack_ *__pyx_v_blocks; Block_ const *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; size_t __pyx_v_block_index_start; size_t __pyx_v_block_index_endex; addr_t __pyx_v_offset; PyObject *__pyx_v_gap_start = NULL; PyObject *__pyx_v_gap_endex = NULL; size_t __pyx_v_size; size_t __pyx_v_block_index; __pyx_t_10bytesparse_2_c_vint __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("flood_", 0); /* … */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Memory.flood_", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_gap_start); __Pyx_XDECREF(__pyx_v_gap_endex); __Pyx_RefNannyFinishContext(); return __pyx_r; }
6973: cdef:
6974: Rack_* blocks
6975: const Block_* block
6976: addr_t block_start
6977: addr_t block_endex
6978: size_t block_index_start
6979: size_t block_index_endex
6980: addr_t offset
6981:
+6982: if start < endex:
__pyx_t_1 = ((__pyx_v_start < __pyx_v_endex) != 0); if (__pyx_t_1) { /* … */ }
+6983: blocks = self._
__pyx_t_2 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_2;
+6984: block_index_start = Rack_IndexStart(blocks, start)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_start); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 6984, __pyx_L1_error)
__pyx_v_block_index_start = __pyx_t_3;
6985:
6986: # Check if touching previous block
+6987: if block_index_start:
__pyx_t_1 = (__pyx_v_block_index_start != 0); if (__pyx_t_1) { /* … */ }
+6988: block = Rack_Get__(blocks, block_index_start - 1)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, (__pyx_v_block_index_start - 1));
+6989: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+6990: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+6991: if block_endex == start:
__pyx_t_1 = ((__pyx_v_block_endex == __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+6992: block_index_start -= 1
__pyx_v_block_index_start = (__pyx_v_block_index_start - 1);
6993:
6994: # Manage block near start
+6995: if block_index_start < Rack_Length(blocks):
__pyx_t_1 = ((__pyx_v_block_index_start < __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks)) != 0); if (__pyx_t_1) { /* … */ }
+6996: block = Rack_Get__(blocks, block_index_start)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index_start);
+6997: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+6998: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
6999:
+7000: if block_start <= start and endex <= block_endex:
__pyx_t_4 = ((__pyx_v_block_start <= __pyx_v_start) != 0); if (__pyx_t_4) { } else { __pyx_t_1 = __pyx_t_4; goto __pyx_L8_bool_binop_done; } __pyx_t_4 = ((__pyx_v_endex <= __pyx_v_block_endex) != 0); __pyx_t_1 = __pyx_t_4; __pyx_L8_bool_binop_done:; if (__pyx_t_1) { /* … */ }
+7001: return 0 # no emptiness to flood
__pyx_r = 0; goto __pyx_L0;
7002:
+7003: if block_start < start:
__pyx_t_1 = ((__pyx_v_block_start < __pyx_v_start) != 0); if (__pyx_t_1) { /* … */ }
+7004: offset = start - block_start
__pyx_v_offset = (__pyx_v_start - __pyx_v_block_start);
+7005: CheckAddrToSizeU(offset)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_v_offset); if (unlikely(__pyx_t_5 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7005, __pyx_L1_error)
+7006: Block_RotateRight_(pattern[0], <size_t>offset)
__pyx_f_10bytesparse_2_c_Block_RotateRight_((__pyx_v_pattern[0]), ((size_t)__pyx_v_offset));
+7007: start = block_start
__pyx_v_start = __pyx_v_block_start;
7008:
7009: # Manage block near end
+7010: block_index_endex = Rack_IndexEndex(blocks, endex)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_v_blocks, __pyx_v_endex); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 7010, __pyx_L1_error)
__pyx_v_block_index_endex = __pyx_t_3;
+7011: if block_index_start < block_index_endex:
__pyx_t_1 = ((__pyx_v_block_index_start < __pyx_v_block_index_endex) != 0); if (__pyx_t_1) { /* … */ }
+7012: block = Rack_Get__(blocks, block_index_endex - 1)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, (__pyx_v_block_index_endex - 1));
+7013: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+7014: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+7015: if endex < block_endex:
__pyx_t_1 = ((__pyx_v_endex < __pyx_v_block_endex) != 0); if (__pyx_t_1) { /* … */ }
+7016: endex = block_endex
__pyx_v_endex = __pyx_v_block_endex;
7017:
+7018: CheckAddrToSizeU(endex - start)
__pyx_t_5 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU((__pyx_v_endex - __pyx_v_start)); if (unlikely(__pyx_t_5 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7018, __pyx_L1_error)
+7019: if not Block_Length(pattern[0]):
__pyx_t_1 = ((!(__pyx_f_10bytesparse_2_c_Block_Length((__pyx_v_pattern[0])) != 0)) != 0); if (unlikely(__pyx_t_1)) { /* … */ }
+7020: raise ValueError('non-empty pattern required')
__pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__15, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7020, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_Raise(__pyx_t_6, 0, 0, 0); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __PYX_ERR(0, 7020, __pyx_L1_error)
7021:
+7022: if backups is not None:
__pyx_t_1 = (__pyx_v_backups != ((PyObject*)Py_None)); __pyx_t_4 = (__pyx_t_1 != 0); if (__pyx_t_4) { /* … */ }
+7023: for gap_start, gap_endex in self.gaps(start, endex):
__pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_gaps); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_8 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_start); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_t_9 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_endex); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_10 = NULL; __pyx_t_11 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_7); if (likely(__pyx_t_10)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); __Pyx_INCREF(__pyx_t_10); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_7, function); __pyx_t_11 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[3] = {__pyx_t_10, __pyx_t_8, __pyx_t_9}; __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[3] = {__pyx_t_10, __pyx_t_8, __pyx_t_9}; __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } else #endif { __pyx_t_12 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_12); if (__pyx_t_10) { __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_10); __pyx_t_10 = NULL; } __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_12, 0+__pyx_t_11, __pyx_t_8); 
__Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_12, 1+__pyx_t_11, __pyx_t_9); __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_12, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; if (likely(PyList_CheckExact(__pyx_t_6)) || PyTuple_CheckExact(__pyx_t_6)) { __pyx_t_7 = __pyx_t_6; __Pyx_INCREF(__pyx_t_7); __pyx_t_13 = 0; __pyx_t_14 = NULL; } else { __pyx_t_13 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_14 = Py_TYPE(__pyx_t_7)->tp_iternext; if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 7023, __pyx_L1_error) } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; for (;;) { if (likely(!__pyx_t_14)) { if (likely(PyList_CheckExact(__pyx_t_7))) { if (__pyx_t_13 >= PyList_GET_SIZE(__pyx_t_7)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_6 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_13); __Pyx_INCREF(__pyx_t_6); __pyx_t_13++; if (unlikely(0 < 0)) __PYX_ERR(0, 7023, __pyx_L1_error) #else __pyx_t_6 = PySequence_ITEM(__pyx_t_7, __pyx_t_13); __pyx_t_13++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); #endif } else { if (__pyx_t_13 >= PyTuple_GET_SIZE(__pyx_t_7)) break; #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_13); __Pyx_INCREF(__pyx_t_6); __pyx_t_13++; if (unlikely(0 < 0)) __PYX_ERR(0, 7023, __pyx_L1_error) #else __pyx_t_6 = PySequence_ITEM(__pyx_t_7, __pyx_t_13); __pyx_t_13++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); #endif } } else { __pyx_t_6 = __pyx_t_14(__pyx_t_7); if (unlikely(!__pyx_t_6)) { PyObject* exc_type = PyErr_Occurred(); if (exc_type) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); else __PYX_ERR(0, 7023, 
__pyx_L1_error) } break; } __Pyx_GOTREF(__pyx_t_6); } if ((likely(PyTuple_CheckExact(__pyx_t_6))) || (PyList_CheckExact(__pyx_t_6))) { PyObject* sequence = __pyx_t_6; Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); if (unlikely(size != 2)) { if (size > 2) __Pyx_RaiseTooManyValuesError(2); else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); __PYX_ERR(0, 7023, __pyx_L1_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { __pyx_t_12 = PyTuple_GET_ITEM(sequence, 0); __pyx_t_9 = PyTuple_GET_ITEM(sequence, 1); } else { __pyx_t_12 = PyList_GET_ITEM(sequence, 0); __pyx_t_9 = PyList_GET_ITEM(sequence, 1); } __Pyx_INCREF(__pyx_t_12); __Pyx_INCREF(__pyx_t_9); #else __pyx_t_12 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_12); __pyx_t_9 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); #endif __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else { Py_ssize_t index = -1; __pyx_t_8 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7023, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_15 = Py_TYPE(__pyx_t_8)->tp_iternext; index = 0; __pyx_t_12 = __pyx_t_15(__pyx_t_8); if (unlikely(!__pyx_t_12)) goto __pyx_L17_unpacking_failed; __Pyx_GOTREF(__pyx_t_12); index = 1; __pyx_t_9 = __pyx_t_15(__pyx_t_8); if (unlikely(!__pyx_t_9)) goto __pyx_L17_unpacking_failed; __Pyx_GOTREF(__pyx_t_9); if (__Pyx_IternextUnpackEndCheck(__pyx_t_15(__pyx_t_8), 2) < 0) __PYX_ERR(0, 7023, __pyx_L1_error) __pyx_t_15 = NULL; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; goto __pyx_L18_unpacking_done; __pyx_L17_unpacking_failed:; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_t_15 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); __PYX_ERR(0, 7023, __pyx_L1_error) __pyx_L18_unpacking_done:; } __Pyx_XDECREF_SET(__pyx_v_gap_start, 
__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF_SET(__pyx_v_gap_endex, __pyx_t_9); __pyx_t_9 = 0; /* … */ } __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+7024: backups.append(Memory(start=gap_start, endex=gap_endex, validate=False))
if (unlikely(__pyx_v_backups == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 7024, __pyx_L1_error) } __pyx_t_6 = __Pyx_PyDict_NewPresized(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7024, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_start, __pyx_v_gap_start) < 0) __PYX_ERR(0, 7024, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_endex, __pyx_v_gap_endex) < 0) __PYX_ERR(0, 7024, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_validate, Py_False) < 0) __PYX_ERR(0, 7024, __pyx_L1_error) __pyx_t_9 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_10bytesparse_2_c_Memory), __pyx_empty_tuple, __pyx_t_6); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 7024, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __pyx_t_16 = __Pyx_PyList_Append(__pyx_v_backups, __pyx_t_9); if (unlikely(__pyx_t_16 == ((int)-1))) __PYX_ERR(0, 7024, __pyx_L1_error) __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;
7025:
+7026: size = <size_t>(endex - start)
__pyx_v_size = ((size_t)(__pyx_v_endex - __pyx_v_start));
+7027: pattern[0] = Block_RepeatToSize(pattern[0], size)
__pyx_t_17 = __pyx_f_10bytesparse_2_c_Block_RepeatToSize((__pyx_v_pattern[0]), __pyx_v_size); if (unlikely(__pyx_t_17 == ((Block_ *)NULL))) __PYX_ERR(0, 7027, __pyx_L1_error)
(__pyx_v_pattern[0]) = __pyx_t_17;
+7028: pattern[0].address = start
(__pyx_v_pattern[0])->address = __pyx_v_start;
7029:
+7030: for block_index in range(block_index_start, block_index_endex):
__pyx_t_18 = __pyx_v_block_index_endex; __pyx_t_19 = __pyx_t_18; for (__pyx_t_20 = __pyx_v_block_index_start; __pyx_t_20 < __pyx_t_19; __pyx_t_20+=1) { __pyx_v_block_index = __pyx_t_20;
+7031: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+7032: offset = Block_Start(block) - start
__pyx_v_offset = (__pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block) - __pyx_v_start);
7033: # CheckAddrToSizeU(offset) # implied
+7034: pattern[0] = Block_Write_(pattern[0], <size_t>offset, Block_Length(block), Block_At__(block, 0))
__pyx_t_17 = __pyx_f_10bytesparse_2_c_Block_Write_((__pyx_v_pattern[0]), ((size_t)__pyx_v_offset), __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block), __pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_block, 0)); if (unlikely(__pyx_t_17 == ((Block_ *)NULL))) __PYX_ERR(0, 7034, __pyx_L1_error)
(__pyx_v_pattern[0]) = __pyx_t_17;
}
7035:
+7036: self._ = blocks = Rack_DelSlice_(blocks, block_index_start, block_index_endex)
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_DelSlice_(__pyx_v_blocks, __pyx_v_block_index_start, __pyx_v_block_index_endex); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 7036, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_2;
__pyx_v_blocks = __pyx_t_2;
+7037: self._ = blocks = Rack_Insert_(blocks, block_index_start, pattern[0])
__pyx_t_2 = __pyx_f_10bytesparse_2_c_Rack_Insert_(__pyx_v_blocks, __pyx_v_block_index_start, (__pyx_v_pattern[0])); if (unlikely(__pyx_t_2 == ((Rack_ *)NULL))) __PYX_ERR(0, 7037, __pyx_L1_error)
__pyx_v_self->_ = __pyx_t_2;
__pyx_v_blocks = __pyx_t_2;
7038:
+7039: def flood(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_113flood(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_112flood[] = "Memory.flood(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None, pattern: Union[AnyBytes, Value] = 0, backups: Optional[MemoryList] = None) -> None\nFills emptiness between non-touching blocks.\n\n Arguments:\n start (int):\n Inclusive start address for flooding.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address for flooding.\n If ``None``, :attr:`endex` is considered.\n\n pattern (items):\n Pattern of items to fill the range.\n\n backups (list of :obj:`Memory`):\n Optional output list holding backup copies of the deleted\n items, before trimming.\n\n Examples:\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C | 1 | 2 | x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> memory.flood(pattern=b'123')\n >>> memory._blocks\n [[1, b'ABC12xyz']]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | |[A | B | C | 2 | 3 | x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> memory.flood(3, 7, b'123')\n >>> memory._blocks\n [[1, b'ABC23xyz']]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_113flood(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_pattern = 0; PyObject *__pyx_v_backups = 0; PyObject *__pyx_r 
= 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("flood (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_pattern,&__pyx_n_s_backups,0}; PyObject* values[4] = {0,0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_112flood(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_pattern, PyObject *__pyx_v_backups) { addr_t __pyx_v_start_; addr_t __pyx_v_endex_; Block_ *__pyx_v_pattern_; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("flood", 0); /* … */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Memory.flood", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_116generator3(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */
7040: self: 'Memory',
+7041: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+7042: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None); values[2] = ((PyObject *)__pyx_int_0);
7043: pattern: Union[AnyBytes, Value] = 0,
+7044: backups: Optional[MemoryList] = None,
values[3] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pattern); if (value) { values[2] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_backups); if (value) { values[3] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "flood") < 0)) __PYX_ERR(0, 7039, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_pattern = values[2]; __pyx_v_backups = values[3]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; 
__Pyx_RaiseArgtupleInvalid("flood", 0, 0, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7039, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.flood", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_112flood(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_pattern, __pyx_v_backups);
7045: ) -> None:
7046: r"""Fills emptiness between non-touching blocks.
7047:
7048: Arguments:
7049: start (int):
7050: Inclusive start address for flooding.
7051: If ``None``, :attr:`start` is considered.
7052:
7053: endex (int):
7054: Exclusive end address for flooding.
7055: If ``None``, :attr:`endex` is considered.
7056:
7057: pattern (items):
7058: Pattern of items to fill the range.
7059:
7060: backups (list of :obj:`Memory`):
7061: Optional output list holding backup copies of the deleted
7062: items, before trimming.
7063:
7064: Examples:
7065: +---+---+---+---+---+---+---+---+---+---+
7066: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
7067: +===+===+===+===+===+===+===+===+===+===+
7068: | |[A | B | C]| | |[x | y | z]| |
7069: +---+---+---+---+---+---+---+---+---+---+
7070: | |[A | B | C | 1 | 2 | x | y | z]| |
7071: +---+---+---+---+---+---+---+---+---+---+
7072:
7073: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
7074: >>> memory.flood(pattern=b'123')
7075: >>> memory._blocks
7076: [[1, b'ABC12xyz']]
7077:
7078: ~~~
7079:
7080: +---+---+---+---+---+---+---+---+---+---+
7081: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
7082: +===+===+===+===+===+===+===+===+===+===+
7083: | |[A | B | C]| | |[x | y | z]| |
7084: +---+---+---+---+---+---+---+---+---+---+
7085: | |[A | B | C | 2 | 3 | x | y | z]| |
7086: +---+---+---+---+---+---+---+---+---+---+
7087:
7088: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
7089: >>> memory.flood(3, 7, b'123')
7090: >>> memory._blocks
7091: [[1, b'ABC23xyz']]
7092: """
7093: cdef:
7094: addr_t start_
7095: addr_t endex_
+7096: Block_* pattern_ = NULL
__pyx_v_pattern_ = NULL;
7097:
+7098: start_, endex_ = self.bound_(start, endex)
__pyx_t_1 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->bound_(__pyx_v_self, __pyx_v_start, __pyx_v_endex); __pyx_t_2 = __pyx_t_1.f0; __pyx_t_3 = __pyx_t_1.f1; __pyx_v_start_ = __pyx_t_2; __pyx_v_endex_ = __pyx_t_3;
+7099: if start_ < endex_:
__pyx_t_4 = ((__pyx_v_start_ < __pyx_v_endex_) != 0); if (__pyx_t_4) { /* … */ }
+7100: pattern_ = Block_FromObject(0, pattern, False) # size checked later on
__pyx_t_5 = __pyx_f_10bytesparse_2_c_Block_FromObject(0, __pyx_v_pattern, 0); if (unlikely(__pyx_t_5 == ((Block_ *)NULL))) __PYX_ERR(0, 7100, __pyx_L1_error)
__pyx_v_pattern_ = __pyx_t_5;
+7101: try:
{ /*try:*/ { /* … */ } __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; goto __pyx_L9_try_end; __pyx_L4_error:; /* … */ __Pyx_XGIVEREF(__pyx_t_6); __Pyx_XGIVEREF(__pyx_t_7); __Pyx_XGIVEREF(__pyx_t_8); __Pyx_ExceptionReset(__pyx_t_6, __pyx_t_7, __pyx_t_8); goto __pyx_L1_error; __pyx_L9_try_end:; }
+7102: self.flood_(start_, endex_, &pattern_, backups)
if (!(likely(PyList_CheckExact(__pyx_v_backups))||((__pyx_v_backups) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_backups)->tp_name), 0))) __PYX_ERR(0, 7102, __pyx_L4_error) __pyx_t_9 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_v_self->__pyx_vtab)->flood_(__pyx_v_self, __pyx_v_start_, __pyx_v_endex_, (&__pyx_v_pattern_), ((PyObject*)__pyx_v_backups)); if (unlikely(__pyx_t_9 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7102, __pyx_L4_error)
+7103: except:
/*except:*/ { __Pyx_AddTraceback("bytesparse._c.Memory.flood", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0) __PYX_ERR(0, 7103, __pyx_L6_except_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_11); __Pyx_GOTREF(__pyx_t_12);
+7104: Block_Free(pattern_) # orphan
(void)(__pyx_f_10bytesparse_2_c_Block_Free(__pyx_v_pattern_));
+7105: raise
__Pyx_GIVEREF(__pyx_t_10); __Pyx_GIVEREF(__pyx_t_11); __Pyx_XGIVEREF(__pyx_t_12); __Pyx_ErrRestoreWithState(__pyx_t_10, __pyx_t_11, __pyx_t_12); __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __PYX_ERR(0, 7105, __pyx_L6_except_error) } __pyx_L6_except_error:;
7106:
+7107: def keys(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_115keys(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_114keys[] = "Memory.keys(self: u'Memory', start: Optional[Address] = None, endex: Optional[Union[Address, EllipsisType]] = None) -> Iterator[Address]\nIterates over addresses.\n\n Iterates over addresses, from `start` to `endex`.\n Implemets the interface of :obj:`dict`.\n\n Arguments:\n start (int):\n Inclusive start address.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address.\n If ``None``, :attr:`endex` is considered.\n If ``Ellipsis``, the iterator is infinite.\n\n Yields:\n int: Range address.\n\n Examples:\n >>> from itertools import islice\n >>> memory = Memory()\n >>> list(memory.keys())\n []\n >>> list(memory.keys(endex=8))\n [0, 1, 2, 3, 4, 5, 6, 7]\n >>> list(memory.keys(3, 8))\n [3, 4, 5, 6, 7]\n >>> list(islice(memory.keys(3, ...), 7))\n [3, 4, 5, 6, 7, 8, 9]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> list(memory.keys())\n [1, 2, 3, 4, 5, 6, 7, 8]\n >>> list(memory.keys(endex=8))\n [0, 1, 2, 3, 4, 5, 6, 7]\n >>> list(memory.keys(3, 8))\n [3, 4, 5, 6, 7]\n >>> list(islice(memory.keys(3, ...), 7))\n [3, 4, 5, 6, 7, 8, 9]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_115keys(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("keys (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[2] = {0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); 
return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_114keys(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_5_keys *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("keys", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_5_keys *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_5_keys(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_5_keys, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_5_keys *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 7107, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __pyx_cur_scope->__pyx_v_start = __pyx_v_start; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); __pyx_cur_scope->__pyx_v_endex = __pyx_v_endex; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_116generator3, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_keys, __pyx_n_s_Memory_keys, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 7107, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.keys", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_116generator3(__pyx_CoroutineObject *__pyx_generator, 
CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("keys", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7107, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("keys", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_119generator4(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_5_keys { PyObject_HEAD PyObject *__pyx_v_endex; addr_t __pyx_v_endex_; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; PyObject *__pyx_v_start; addr_t __pyx_v_start_; };
7108: self: 'Memory',
+7109: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+7110: endex: Optional[Union[Address, EllipsisType]] = None,
values[1] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "keys") < 0)) __PYX_ERR(0, 7107, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("keys", 0, 0, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7107, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.keys", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_114keys(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex);
7111: ) -> Iterator[Address]:
7112: r"""Iterates over addresses.
7113:
7114: Iterates over addresses, from `start` to `endex`.
7115: Implemets the interface of :obj:`dict`.
7116:
7117: Arguments:
7118: start (int):
7119: Inclusive start address.
7120: If ``None``, :attr:`start` is considered.
7121:
7122: endex (int):
7123: Exclusive end address.
7124: If ``None``, :attr:`endex` is considered.
7125: If ``Ellipsis``, the iterator is infinite.
7126:
7127: Yields:
7128: int: Range address.
7129:
7130: Examples:
7131: >>> from itertools import islice
7132: >>> memory = Memory()
7133: >>> list(memory.keys())
7134: []
7135: >>> list(memory.keys(endex=8))
7136: [0, 1, 2, 3, 4, 5, 6, 7]
7137: >>> list(memory.keys(3, 8))
7138: [3, 4, 5, 6, 7]
7139: >>> list(islice(memory.keys(3, ...), 7))
7140: [3, 4, 5, 6, 7, 8, 9]
7141:
7142: ~~~
7143:
7144: +---+---+---+---+---+---+---+---+---+---+
7145: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
7146: +===+===+===+===+===+===+===+===+===+===+
7147: | |[A | B | C]| | |[x | y | z]| |
7148: +---+---+---+---+---+---+---+---+---+---+
7149:
7150: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
7151: >>> list(memory.keys())
7152: [1, 2, 3, 4, 5, 6, 7, 8]
7153: >>> list(memory.keys(endex=8))
7154: [0, 1, 2, 3, 4, 5, 6, 7]
7155: >>> list(memory.keys(3, 8))
7156: [3, 4, 5, 6, 7]
7157: >>> list(islice(memory.keys(3, ...), 7))
7158: [3, 4, 5, 6, 7, 8, 9]
7159: """
7160: cdef:
7161: addr_t start_
7162: addr_t endex_
7163:
+7164: if start is None:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_start == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+7165: start_ = self.start_()
__pyx_cur_scope->__pyx_v_start_ = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->start_(__pyx_cur_scope->__pyx_v_self);
7166: else:
+7167: start_ = <addr_t>start
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_start); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7167, __pyx_L1_error) __pyx_cur_scope->__pyx_v_start_ = ((addr_t)__pyx_t_3); } __pyx_L4:;
7168:
+7169: if endex is None:
__pyx_t_2 = (__pyx_cur_scope->__pyx_v_endex == Py_None); __pyx_t_1 = (__pyx_t_2 != 0); if (__pyx_t_1) { /* … */ goto __pyx_L5; }
+7170: endex_ = self.endex_()
__pyx_cur_scope->__pyx_v_endex_ = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->endex_(__pyx_cur_scope->__pyx_v_self);
+7171: elif endex is Ellipsis:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_endex == __pyx_builtin_Ellipsis); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L5; }
+7172: endex_ = ADDR_MAX
__pyx_cur_scope->__pyx_v_endex_ = ADDR_MAX;
7173: else:
+7174: endex_ = <addr_t>endex
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_endex); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7174, __pyx_L1_error) __pyx_cur_scope->__pyx_v_endex_ = ((addr_t)__pyx_t_3); } __pyx_L5:;
7175:
+7176: while start_ < endex_:
while (1) { __pyx_t_2 = ((__pyx_cur_scope->__pyx_v_start_ < __pyx_cur_scope->__pyx_v_endex_) != 0); if (!__pyx_t_2) break;
+7177: yield start_
__pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_start_); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7177, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_r = __pyx_t_4; __pyx_t_4 = 0; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L8_resume_from_yield:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7177, __pyx_L1_error)
+7178: start_ += 1
__pyx_cur_scope->__pyx_v_start_ = (__pyx_cur_scope->__pyx_v_start_ + 1); } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
7179:
+7180: def values(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_118values(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_117values[] = "Memory.values(self: u'Memory', start: Optional[Address] = None, endex: Optional[Union[Address, EllipsisType]] = None, pattern: Optional[Union[AnyBytes, Value]] = None) -> Iterator[Optional[Value]]\nIterates over values.\n\n Iterates over values, from `start` to `endex`.\n Implemets the interface of :obj:`dict`.\n\n Arguments:\n start (int):\n Inclusive start address.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address.\n If ``None``, :attr:`endex` is considered.\n If ``Ellipsis``, the iterator is infinite.\n\n pattern (items):\n Pattern of values to fill emptiness.\n\n Yields:\n int: Range values.\n\n Examples:\n >>> from itertools import islice\n >>> memory = Memory()\n >>> list(memory.values(endex=8))\n [None, None, None, None, None, None, None]\n >>> list(memory.values(3, 8))\n [None, None, None, None, None]\n >>> list(islice(memory.values(3, ...), 7))\n [None, None, None, None, None, None, None]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | | 65| 66| 67| | |120|121|122| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> list(memory.values())\n [65, 66, 67, None, None, 120, 121, 122]\n >>> list(memory.values(3, 8))\n [67, None, None, 120, 121]\n >>> list(islice(memory.values(3, ...), 7))\n [67, None, None, 120, 121, 122, None]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_118values(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_pattern = 0; PyObject *__pyx_r = 0; 
__Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("values (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_pattern,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_117values(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_pattern) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_6_values *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("values", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_6_values *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_6_values(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_6_values, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_6_values *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 7180, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __pyx_cur_scope->__pyx_v_start = __pyx_v_start; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); __pyx_cur_scope->__pyx_v_endex = __pyx_v_endex; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); __pyx_cur_scope->__pyx_v_pattern = __pyx_v_pattern; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_pattern); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_pattern); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_119generator4, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_values, __pyx_n_s_Memory_values, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 7180, __pyx_L1_error) 
__Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.values", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_119generator4(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("values", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7180, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("values", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_122generator5(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_6_values { PyObject_HEAD PyObject *__pyx_v_endex; addr_t __pyx_v_endex_; PyObject *__pyx_v_pattern; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; PyObject *__pyx_v_start; addr_t __pyx_v_start_; };
7181: self: 'Memory',
+7182: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+7183: endex: Optional[Union[Address, EllipsisType]] = None,
values[1] = ((PyObject *)Py_None);
+7184: pattern: Optional[Union[AnyBytes, Value]] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pattern); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "values") < 0)) __PYX_ERR(0, 7180, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_pattern = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("values", 0, 0, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7180, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.values", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_117values(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_pattern);
7185: ) -> Iterator[Optional[Value]]:
7186: r"""Iterates over values.
7187:
7188: Iterates over values, from `start` to `endex`.
7189: Implemets the interface of :obj:`dict`.
7190:
7191: Arguments:
7192: start (int):
7193: Inclusive start address.
7194: If ``None``, :attr:`start` is considered.
7195:
7196: endex (int):
7197: Exclusive end address.
7198: If ``None``, :attr:`endex` is considered.
7199: If ``Ellipsis``, the iterator is infinite.
7200:
7201: pattern (items):
7202: Pattern of values to fill emptiness.
7203:
7204: Yields:
7205: int: Range values.
7206:
7207: Examples:
7208: >>> from itertools import islice
7209: >>> memory = Memory()
7210: >>> list(memory.values(endex=8))
7211: [None, None, None, None, None, None, None]
7212: >>> list(memory.values(3, 8))
7213: [None, None, None, None, None]
7214: >>> list(islice(memory.values(3, ...), 7))
7215: [None, None, None, None, None, None, None]
7216:
7217: ~~~
7218:
7219: +---+---+---+---+---+---+---+---+---+---+
7220: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
7221: +===+===+===+===+===+===+===+===+===+===+
7222: | |[A | B | C]| | |[x | y | z]| |
7223: +---+---+---+---+---+---+---+---+---+---+
7224: | | 65| 66| 67| | |120|121|122| |
7225: +---+---+---+---+---+---+---+---+---+---+
7226:
7227: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
7228: >>> list(memory.values())
7229: [65, 66, 67, None, None, 120, 121, 122]
7230: >>> list(memory.values(3, 8))
7231: [67, None, None, 120, 121]
7232: >>> list(islice(memory.values(3, ...), 7))
7233: [67, None, None, 120, 121, 122, None]
7234: """
7235: cdef:
7236: addr_t start_
7237: addr_t endex_
7238:
+7239: if start is None:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_start == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+7240: start_ = self.start_()
__pyx_cur_scope->__pyx_v_start_ = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->start_(__pyx_cur_scope->__pyx_v_self);
7241: else:
+7242: start_ = <addr_t>start
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_start); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7242, __pyx_L1_error) __pyx_cur_scope->__pyx_v_start_ = ((addr_t)__pyx_t_3); } __pyx_L4:;
7243:
+7244: if endex is None:
__pyx_t_2 = (__pyx_cur_scope->__pyx_v_endex == Py_None); __pyx_t_1 = (__pyx_t_2 != 0); if (__pyx_t_1) { /* … */ goto __pyx_L5; }
+7245: endex_ = self.endex_()
__pyx_cur_scope->__pyx_v_endex_ = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->endex_(__pyx_cur_scope->__pyx_v_self);
+7246: elif endex is Ellipsis:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_endex == __pyx_builtin_Ellipsis); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L5; }
+7247: endex_ = ADDR_MAX
__pyx_cur_scope->__pyx_v_endex_ = ADDR_MAX;
7248: else:
+7249: endex_ = <addr_t>endex
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_endex); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7249, __pyx_L1_error) __pyx_cur_scope->__pyx_v_endex_ = ((addr_t)__pyx_t_3); } __pyx_L5:;
7250:
+7251: yield from Rover(self, start_, endex_, pattern, True, endex is Ellipsis)
__pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_start_); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7251, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_endex_); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 7251, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_2 = (__pyx_cur_scope->__pyx_v_endex == __pyx_builtin_Ellipsis); __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7251, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = PyTuple_New(6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7251, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(((PyObject *)__pyx_cur_scope->__pyx_v_self)); __Pyx_GIVEREF(((PyObject *)__pyx_cur_scope->__pyx_v_self)); PyTuple_SET_ITEM(__pyx_t_7, 0, ((PyObject *)__pyx_cur_scope->__pyx_v_self)); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_4); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_t_5); __Pyx_INCREF(__pyx_cur_scope->__pyx_v_pattern); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_pattern); PyTuple_SET_ITEM(__pyx_t_7, 3, __pyx_cur_scope->__pyx_v_pattern); __Pyx_INCREF(Py_True); __Pyx_GIVEREF(Py_True); PyTuple_SET_ITEM(__pyx_t_7, 4, Py_True); __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 5, __pyx_t_6); __pyx_t_4 = 0; __pyx_t_5 = 0; __pyx_t_6 = 0; __pyx_t_6 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_10bytesparse_2_c_Rover), __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7251, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_r = __Pyx_Generator_Yield_From(__pyx_generator, __pyx_t_6); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XGOTREF(__pyx_r); if (likely(__pyx_r)) { __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L6_resume_from_yield_from:; if (unlikely(!__pyx_sent_value)) 
__PYX_ERR(0, 7251, __pyx_L1_error) } else { PyObject* exc_type = __Pyx_PyErr_Occurred(); if (exc_type) { if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit && __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear(); else __PYX_ERR(0, 7251, __pyx_L1_error) } } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
7252:
+7253: def rvalues(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_121rvalues(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_120rvalues[] = "Memory.rvalues(self: u'Memory', start: Optional[Union[Address, EllipsisType]] = None, endex: Optional[Address] = None, pattern: Optional[Union[AnyBytes, Value]] = None) -> Iterator[Optional[Value]]\nIterates over values, reversed order.\n\n Iterates over values, from `endex` to `start`.\n\n Arguments:\n start (int):\n Inclusive start address.\n If ``None``, :attr:`start` is considered.\n If ``Ellipsis``, the iterator is infinite.\n\n endex (int):\n Exclusive end address.\n If ``None``, :attr:`endex` is considered.\n\n pattern (items):\n Pattern of values to fill emptiness.\n\n Yields:\n int: Range values.\n\n Examples:\n >>> from itertools import islice\n >>> memory = Memory()\n >>> list(memory.values(endex=8))\n [None, None, None, None, None, None, None]\n >>> list(memory.values(3, 8))\n [None, None, None, None, None]\n >>> list(islice(memory.values(3, ...), 7))\n [None, None, None, None, None, None, None]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | | 65| 66| 67| | |120|121|122| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> list(memory.values())\n [65, 66, 67, None, None, 120, 121, 122]\n >>> list(memory.values(3, 8))\n [67, None, None, 120, 121]\n >>> list(islice(memory.values(3, ...), 7))\n [67, None, None, 120, 121, 122, None]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_121rvalues(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_pattern = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations 
__Pyx_RefNannySetupContext("rvalues (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_pattern,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_120rvalues(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_pattern) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_7_rvalues *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rvalues", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_7_rvalues *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_7_rvalues(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_7_rvalues, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_7_rvalues *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 7253, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __pyx_cur_scope->__pyx_v_start = __pyx_v_start; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); __pyx_cur_scope->__pyx_v_endex = __pyx_v_endex; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); __pyx_cur_scope->__pyx_v_pattern = __pyx_v_pattern; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_pattern); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_pattern); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_122generator5, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_rvalues, __pyx_n_s_Memory_rvalues, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 7253, __pyx_L1_error) 
__Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.rvalues", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_122generator5(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("rvalues", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7253, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("rvalues", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_125generator6(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_7_rvalues { PyObject_HEAD PyObject *__pyx_v_endex; addr_t __pyx_v_endex_; PyObject *__pyx_v_pattern; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; PyObject *__pyx_v_start; addr_t __pyx_v_start_; };
7254: self: 'Memory',
+7255: start: Optional[Union[Address, EllipsisType]] = None,
values[0] = ((PyObject *)Py_None);
+7256: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+7257: pattern: Optional[Union[AnyBytes, Value]] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pattern); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "rvalues") < 0)) __PYX_ERR(0, 7253, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_pattern = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("rvalues", 0, 0, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7253, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.rvalues", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_120rvalues(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_pattern);
7258: ) -> Iterator[Optional[Value]]:
7259: r"""Iterates over values, reversed order.
7260:
7261: Iterates over values, from `endex` to `start`.
7262:
7263: Arguments:
7264: start (int):
7265: Inclusive start address.
7266: If ``None``, :attr:`start` is considered.
7267: If ``Ellipsis``, the iterator is infinite.
7268:
7269: endex (int):
7270: Exclusive end address.
7271: If ``None``, :attr:`endex` is considered.
7272:
7273: pattern (items):
7274: Pattern of values to fill emptiness.
7275:
7276: Yields:
7277: int: Range values.
7278:
7279: Examples:
7280: >>> from itertools import islice
7281: >>> memory = Memory()
7282: >>> list(memory.values(endex=8))
7283: [None, None, None, None, None, None, None]
7284: >>> list(memory.values(3, 8))
7285: [None, None, None, None, None]
7286: >>> list(islice(memory.values(3, ...), 7))
7287: [None, None, None, None, None, None, None]
7288:
7289: ~~~
7290:
7291: +---+---+---+---+---+---+---+---+---+---+
7292: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
7293: +===+===+===+===+===+===+===+===+===+===+
7294: | |[A | B | C]| | |[x | y | z]| |
7295: +---+---+---+---+---+---+---+---+---+---+
7296: | | 65| 66| 67| | |120|121|122| |
7297: +---+---+---+---+---+---+---+---+---+---+
7298:
7299: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
7300: >>> list(memory.values())
7301: [65, 66, 67, None, None, 120, 121, 122]
7302: >>> list(memory.values(3, 8))
7303: [67, None, None, 120, 121]
7304: >>> list(islice(memory.values(3, ...), 7))
7305: [67, None, None, 120, 121, 122, None]
7306: """
7307: cdef:
7308: addr_t start_
7309: addr_t endex_
7310:
+7311: if start is None:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_start == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L4; }
+7312: start_ = self.start_()
__pyx_cur_scope->__pyx_v_start_ = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->start_(__pyx_cur_scope->__pyx_v_self);
+7313: elif start is Ellipsis:
__pyx_t_2 = (__pyx_cur_scope->__pyx_v_start == __pyx_builtin_Ellipsis); __pyx_t_1 = (__pyx_t_2 != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+7314: start_ = ADDR_MIN
__pyx_cur_scope->__pyx_v_start_ = ADDR_MIN;
7315: else:
+7316: start_ = <addr_t>start
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_start); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7316, __pyx_L1_error) __pyx_cur_scope->__pyx_v_start_ = ((addr_t)__pyx_t_3); } __pyx_L4:;
7317:
+7318: if endex is None:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_endex == Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* … */ goto __pyx_L5; }
+7319: endex_ = self.endex_()
__pyx_cur_scope->__pyx_v_endex_ = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->endex_(__pyx_cur_scope->__pyx_v_self);
7320: else:
+7321: endex_ = <addr_t>endex
/*else*/ { __pyx_t_3 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_endex); if (unlikely((__pyx_t_3 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7321, __pyx_L1_error) __pyx_cur_scope->__pyx_v_endex_ = ((addr_t)__pyx_t_3); } __pyx_L5:;
7322:
+7323: yield from Rover(self, start_, endex_, pattern, False, start is Ellipsis)
__pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_start_); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_endex_); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 7323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_2 = (__pyx_cur_scope->__pyx_v_start == __pyx_builtin_Ellipsis); __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = PyTuple_New(6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(((PyObject *)__pyx_cur_scope->__pyx_v_self)); __Pyx_GIVEREF(((PyObject *)__pyx_cur_scope->__pyx_v_self)); PyTuple_SET_ITEM(__pyx_t_7, 0, ((PyObject *)__pyx_cur_scope->__pyx_v_self)); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_4); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_t_5); __Pyx_INCREF(__pyx_cur_scope->__pyx_v_pattern); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_pattern); PyTuple_SET_ITEM(__pyx_t_7, 3, __pyx_cur_scope->__pyx_v_pattern); __Pyx_INCREF(Py_False); __Pyx_GIVEREF(Py_False); PyTuple_SET_ITEM(__pyx_t_7, 4, Py_False); __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 5, __pyx_t_6); __pyx_t_4 = 0; __pyx_t_5 = 0; __pyx_t_6 = 0; __pyx_t_6 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_10bytesparse_2_c_Rover), __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_r = __Pyx_Generator_Yield_From(__pyx_generator, __pyx_t_6); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XGOTREF(__pyx_r); if (likely(__pyx_r)) { __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L6_resume_from_yield_from:; if (unlikely(!__pyx_sent_value)) 
__PYX_ERR(0, 7323, __pyx_L1_error) } else { PyObject* exc_type = __Pyx_PyErr_Occurred(); if (exc_type) { if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit && __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear(); else __PYX_ERR(0, 7323, __pyx_L1_error) } } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
7324:
+7325: def items(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_124items(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_123items[] = "Memory.items(self: u'Memory', start: Optional[Address] = None, endex: Optional[Union[Address, EllipsisType]] = None, pattern: Optional[Union[AnyBytes, Value]] = None) -> Iterator[Tuple[Address, Value]]\nIterates over address and value couples.\n\n Iterates over address and value couples, from `start` to `endex`.\n Implemets the interface of :obj:`dict`.\n\n Arguments:\n start (int):\n Inclusive start address.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address.\n If ``None``, :attr:`endex` is considered.\n If ``Ellipsis``, the iterator is infinite.\n\n pattern (items):\n Pattern of values to fill emptiness.\n\n Yields:\n int: Range address and value couples.\n\n Examples:\n >>> from itertools import islice\n >>> memory = Memory()\n >>> list(memory.items(endex=8))\n [(0, None), (1, None), (2, None), (3, None), (4, None), (5, None), (6, None), (7, None)]\n >>> list(memory.items(3, 8))\n [(3, None), (4, None), (5, None), (6, None), (7, None)]\n >>> list(islice(memory.items(3, ...), 7))\n [(3, None), (4, None), (5, None), (6, None), (7, None), (8, None), (9, None)]\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |\n +===+===+===+===+===+===+===+===+===+===+\n | |[A | B | C]| | |[x | y | z]| |\n +---+---+---+---+---+---+---+---+---+---+\n | | 65| 66| 67| | |120|121|122| |\n +---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])\n >>> list(memory.items())\n [(1, 65), (2, 66), (3, 67), (4, None), (5, None), (6, 120), (7, 121), (8, 122)]\n >>> list(mem""ory.items(3, 8))\n [(3, 67), (4, None), (5, None), (6, 120), (7, 121)]\n >>> list(islice(memory.items(3, ...), 7))\n [(3, 67), (4, None), (5, None), (6, 120), (7, 121), (8, 122), 
(9, None)]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_124items(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_pattern = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("items (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_pattern,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_123items(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_pattern) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_8_items *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("items", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_8_items *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_8_items(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_8_items, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_8_items *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 7325, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __pyx_cur_scope->__pyx_v_start = __pyx_v_start; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); __pyx_cur_scope->__pyx_v_endex = __pyx_v_endex; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); __pyx_cur_scope->__pyx_v_pattern = __pyx_v_pattern; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_pattern); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_pattern); { __pyx_CoroutineObject *gen = 
__Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_125generator6, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_items, __pyx_n_s_Memory_items, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 7325, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.items", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_125generator6(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("items", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7325, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("items", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_128generator7(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_8_items { PyObject_HEAD PyObject *__pyx_v_endex; PyObject *__pyx_v_pattern; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; PyObject *__pyx_v_start; };
7326: self: 'Memory',
+7327: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+7328: endex: Optional[Union[Address, EllipsisType]] = None,
values[1] = ((PyObject *)Py_None);
+7329: pattern: Optional[Union[AnyBytes, Value]] = None,
values[2] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pattern); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "items") < 0)) __PYX_ERR(0, 7325, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_pattern = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("items", 0, 0, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7325, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.items", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_123items(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_pattern);
7330: ) -> Iterator[Tuple[Address, Value]]:
7331: r"""Iterates over address and value couples.
7332:
7333: Iterates over address and value couples, from `start` to `endex`.
7334: Implemets the interface of :obj:`dict`.
7335:
7336: Arguments:
7337: start (int):
7338: Inclusive start address.
7339: If ``None``, :attr:`start` is considered.
7340:
7341: endex (int):
7342: Exclusive end address.
7343: If ``None``, :attr:`endex` is considered.
7344: If ``Ellipsis``, the iterator is infinite.
7345:
7346: pattern (items):
7347: Pattern of values to fill emptiness.
7348:
7349: Yields:
7350: int: Range address and value couples.
7351:
7352: Examples:
7353: >>> from itertools import islice
7354: >>> memory = Memory()
7355: >>> list(memory.items(endex=8))
7356: [(0, None), (1, None), (2, None), (3, None), (4, None), (5, None), (6, None), (7, None)]
7357: >>> list(memory.items(3, 8))
7358: [(3, None), (4, None), (5, None), (6, None), (7, None)]
7359: >>> list(islice(memory.items(3, ...), 7))
7360: [(3, None), (4, None), (5, None), (6, None), (7, None), (8, None), (9, None)]
7361:
7362: ~~~
7363:
7364: +---+---+---+---+---+---+---+---+---+---+
7365: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
7366: +===+===+===+===+===+===+===+===+===+===+
7367: | |[A | B | C]| | |[x | y | z]| |
7368: +---+---+---+---+---+---+---+---+---+---+
7369: | | 65| 66| 67| | |120|121|122| |
7370: +---+---+---+---+---+---+---+---+---+---+
7371:
7372: >>> memory = Memory(blocks=[[1, b'ABC'], [6, b'xyz']])
7373: >>> list(memory.items())
7374: [(1, 65), (2, 66), (3, 67), (4, None), (5, None), (6, 120), (7, 121), (8, 122)]
7375: >>> list(memory.items(3, 8))
7376: [(3, 67), (4, None), (5, None), (6, 120), (7, 121)]
7377: >>> list(islice(memory.items(3, ...), 7))
7378: [(3, 67), (4, None), (5, None), (6, 120), (7, 121), (8, 122), (9, None)]
7379: """
7380:
+7381: yield from zip(self.keys(start, endex), self.values(start, endex, pattern))
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_cur_scope->__pyx_v_self), __pyx_n_s_keys); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; __pyx_t_4 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_4 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_cur_scope->__pyx_v_start, __pyx_cur_scope->__pyx_v_endex}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 2+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_cur_scope->__pyx_v_start, __pyx_cur_scope->__pyx_v_endex}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 2+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_5 = PyTuple_New(2+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); if (__pyx_t_3) { __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; } __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_cur_scope->__pyx_v_start); __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_cur_scope->__pyx_v_endex); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7381, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_cur_scope->__pyx_v_self), __pyx_n_s_values); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_3 = NULL; __pyx_t_4 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_5); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_5, function); __pyx_t_4 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_5)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_cur_scope->__pyx_v_start, __pyx_cur_scope->__pyx_v_endex, __pyx_cur_scope->__pyx_v_pattern}; __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_2); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { PyObject *__pyx_temp[4] = {__pyx_t_3, __pyx_cur_scope->__pyx_v_start, __pyx_cur_scope->__pyx_v_endex, __pyx_cur_scope->__pyx_v_pattern}; __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_4, 3+__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_2); } else #endif { __pyx_t_6 = PyTuple_New(3+__pyx_t_4); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); if (__pyx_t_3) { __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_3); __pyx_t_3 = NULL; } __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_4, __pyx_cur_scope->__pyx_v_start); __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); 
PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_4, __pyx_cur_scope->__pyx_v_endex); __Pyx_INCREF(__pyx_cur_scope->__pyx_v_pattern); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_pattern); PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_4, __pyx_cur_scope->__pyx_v_pattern); __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1); __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2); __pyx_t_1 = 0; __pyx_t_2 = 0; __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_zip, __pyx_t_5, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7381, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_r = __Pyx_Generator_Yield_From(__pyx_generator, __pyx_t_2); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_XGOTREF(__pyx_r); if (likely(__pyx_r)) { __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L4_resume_from_yield_from:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7381, __pyx_L1_error) } else { PyObject* exc_type = __Pyx_PyErr_Occurred(); if (exc_type) { if (likely(exc_type == PyExc_StopIteration || (exc_type != PyExc_GeneratorExit && __Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)))) PyErr_Clear(); else __PYX_ERR(0, 7381, __pyx_L1_error) } } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
7382:
+7383: def intervals(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_127intervals(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_126intervals[] = "Memory.intervals(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None) -> Iterator[ClosedInterval]\nIterates over block intervals.\n\n Iterates over data boundaries within an address range.\n\n Arguments:\n start (int):\n Inclusive start address.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address.\n If ``None``, :attr:`endex` is considered.\n\n Yields:\n couple of addresses: Block data interval boundaries.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|\n +===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B]| | |[x]| |[1 | 2 | 3]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'AB'], [5, b'x'], [7, b'123']])\n >>> list(memory.intervals())\n [(1, 3), (5, 6), (7, 10)]\n >>> list(memory.intervals(2, 9))\n [(2, 3), (5, 6), (7, 9)]\n >>> list(memory.intervals(3, 5))\n []\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_127intervals(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("intervals (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,0}; PyObject* values[2] = {0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_126intervals(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_9_intervals *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations 
__Pyx_RefNannySetupContext("intervals", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_9_intervals *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_9_intervals(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_9_intervals, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_9_intervals *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 7383, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __pyx_cur_scope->__pyx_v_start = __pyx_v_start; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); __pyx_cur_scope->__pyx_v_endex = __pyx_v_endex; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_128generator7, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_intervals, __pyx_n_s_Memory_intervals, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 7383, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.intervals", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_128generator7(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("intervals", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7383, __pyx_L1_error) /* … 
*/ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("intervals", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_131generator8(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value); /* proto */ /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_9_intervals { PyObject_HEAD Block_ const *__pyx_v_block; size_t __pyx_v_block_count; addr_t __pyx_v_block_endex; size_t __pyx_v_block_index; size_t __pyx_v_block_index_endex; size_t __pyx_v_block_index_start; addr_t __pyx_v_block_start; Rack_ const *__pyx_v_blocks; PyObject *__pyx_v_endex; addr_t __pyx_v_endex_; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; size_t __pyx_v_slice_endex; size_t __pyx_v_slice_start; PyObject *__pyx_v_start; addr_t __pyx_v_start_; size_t __pyx_t_0; size_t __pyx_t_1; size_t __pyx_t_2; };
7384: self: 'Memory',
+7385: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+7386: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "intervals") < 0)) __PYX_ERR(0, 7383, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("intervals", 0, 0, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7383, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.intervals", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_126intervals(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex);
7387: ) -> Iterator[ClosedInterval]:
7388: r"""Iterates over block intervals.
7389:
7390: Iterates over data boundaries within an address range.
7391:
7392: Arguments:
7393: start (int):
7394: Inclusive start address.
7395: If ``None``, :attr:`start` is considered.
7396:
7397: endex (int):
7398: Exclusive end address.
7399: If ``None``, :attr:`endex` is considered.
7400:
7401: Yields:
7402: couple of addresses: Block data interval boundaries.
7403:
7404: Example:
7405: +---+---+---+---+---+---+---+---+---+---+---+
7406: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|
7407: +===+===+===+===+===+===+===+===+===+===+===+
7408: | |[A | B]| | |[x]| |[1 | 2 | 3]| |
7409: +---+---+---+---+---+---+---+---+---+---+---+
7410:
7411: >>> memory = Memory(blocks=[[1, b'AB'], [5, b'x'], [7, b'123']])
7412: >>> list(memory.intervals())
7413: [(1, 3), (5, 6), (7, 10)]
7414: >>> list(memory.intervals(2, 9))
7415: [(2, 3), (5, 6), (7, 9)]
7416: >>> list(memory.intervals(3, 5))
7417: []
7418: """
7419: cdef:
7420: addr_t start_
7421: addr_t endex_
+7422: const Rack_* blocks = self._
__pyx_t_1 = __pyx_cur_scope->__pyx_v_self->_; __pyx_cur_scope->__pyx_v_blocks = __pyx_t_1;
+7423: size_t block_count = Rack_Length(blocks)
__pyx_cur_scope->__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_cur_scope->__pyx_v_blocks);
7424: size_t block_index
7425: size_t block_index_start
7426: size_t block_index_endex
7427: const Block_* block
7428: addr_t block_start
7429: addr_t block_endex
7430: size_t slice_start
7431: size_t slice_endex
7432:
+7433: if block_count:
__pyx_t_2 = (__pyx_cur_scope->__pyx_v_block_count != 0); if (__pyx_t_2) { /* … */ } CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
+7434: block_index_start = 0 if start is None else Rack_IndexStart(blocks, <addr_t>start)
__pyx_t_2 = (__pyx_cur_scope->__pyx_v_start == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_3 = 0; } else { __pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_start); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7434, __pyx_L1_error) __pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_cur_scope->__pyx_v_blocks, ((addr_t)__pyx_t_4)); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 7434, __pyx_L1_error) __pyx_t_3 = __pyx_t_5; } __pyx_cur_scope->__pyx_v_block_index_start = __pyx_t_3;
+7435: block_index_endex = block_count if endex is None else Rack_IndexEndex(blocks, <addr_t>endex)
__pyx_t_2 = (__pyx_cur_scope->__pyx_v_endex == Py_None); if ((__pyx_t_2 != 0)) { __pyx_t_3 = __pyx_cur_scope->__pyx_v_block_count; } else { __pyx_t_4 = __Pyx_PyInt_As_uint_fast64_t(__pyx_cur_scope->__pyx_v_endex); if (unlikely((__pyx_t_4 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7435, __pyx_L1_error) __pyx_t_5 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_cur_scope->__pyx_v_blocks, ((addr_t)__pyx_t_4)); if (unlikely(__pyx_t_5 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 7435, __pyx_L1_error) __pyx_t_3 = __pyx_t_5; } __pyx_cur_scope->__pyx_v_block_index_endex = __pyx_t_3;
+7436: start_, endex_ = self.bound_(start, endex)
__pyx_t_6 = ((struct __pyx_vtabstruct_10bytesparse_2_c_Memory *)__pyx_cur_scope->__pyx_v_self->__pyx_vtab)->bound_(__pyx_cur_scope->__pyx_v_self, __pyx_cur_scope->__pyx_v_start, __pyx_cur_scope->__pyx_v_endex); __pyx_t_4 = __pyx_t_6.f0; __pyx_t_7 = __pyx_t_6.f1; __pyx_cur_scope->__pyx_v_start_ = __pyx_t_4; __pyx_cur_scope->__pyx_v_endex_ = __pyx_t_7;
7437:
+7438: for block_index in range(block_index_start, block_index_endex):
__pyx_t_3 = __pyx_cur_scope->__pyx_v_block_index_endex; __pyx_t_8 = __pyx_t_3; for (__pyx_t_9 = __pyx_cur_scope->__pyx_v_block_index_start; __pyx_t_9 < __pyx_t_8; __pyx_t_9+=1) { __pyx_cur_scope->__pyx_v_block_index = __pyx_t_9;
+7439: block = Rack_Get__(blocks, block_index)
__pyx_cur_scope->__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_cur_scope->__pyx_v_blocks, __pyx_cur_scope->__pyx_v_block_index);
+7440: block_start = Block_Start(block)
__pyx_cur_scope->__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_cur_scope->__pyx_v_block);
+7441: block_endex = Block_Endex(block)
__pyx_cur_scope->__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_cur_scope->__pyx_v_block);
+7442: slice_start = block_start if start_ < block_start else start_
if (((__pyx_cur_scope->__pyx_v_start_ < __pyx_cur_scope->__pyx_v_block_start) != 0)) { __pyx_t_7 = __pyx_cur_scope->__pyx_v_block_start; } else { __pyx_t_7 = __pyx_cur_scope->__pyx_v_start_; } __pyx_cur_scope->__pyx_v_slice_start = __pyx_t_7;
+7443: slice_endex = endex_ if endex_ < block_endex else block_endex
if (((__pyx_cur_scope->__pyx_v_endex_ < __pyx_cur_scope->__pyx_v_block_endex) != 0)) { __pyx_t_7 = __pyx_cur_scope->__pyx_v_endex_; } else { __pyx_t_7 = __pyx_cur_scope->__pyx_v_block_endex; } __pyx_cur_scope->__pyx_v_slice_endex = __pyx_t_7;
+7444: if slice_start < slice_endex:
__pyx_t_2 = ((__pyx_cur_scope->__pyx_v_slice_start < __pyx_cur_scope->__pyx_v_slice_endex) != 0); if (__pyx_t_2) { /* … */ } }
+7445: yield slice_start, slice_endex
__pyx_t_10 = __Pyx_PyInt_FromSize_t(__pyx_cur_scope->__pyx_v_slice_start); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 7445, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __pyx_t_11 = __Pyx_PyInt_FromSize_t(__pyx_cur_scope->__pyx_v_slice_endex); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 7445, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __pyx_t_12 = PyTuple_New(2); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 7445, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_10); __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_12, 1, __pyx_t_11); __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_r = __pyx_t_12; __pyx_t_12 = 0; __pyx_cur_scope->__pyx_t_0 = __pyx_t_3; __pyx_cur_scope->__pyx_t_1 = __pyx_t_8; __pyx_cur_scope->__pyx_t_2 = __pyx_t_9; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L8_resume_from_yield:; __pyx_t_3 = __pyx_cur_scope->__pyx_t_0; __pyx_t_8 = __pyx_cur_scope->__pyx_t_1; __pyx_t_9 = __pyx_cur_scope->__pyx_t_2; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7445, __pyx_L1_error)
7446:
+7447: def gaps(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_130gaps(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_129gaps[] = "Memory.gaps(self: u'Memory', start: Optional[Address] = None, endex: Optional[Address] = None, bound: bool = False) -> Iterator[OpenInterval]\nIterates over block gaps.\n\n Iterates over gaps emptiness bounds within an address range.\n If a yielded bound is ``None``, that direction is infinitely empty\n (valid before or after global data bounds).\n\n Arguments:\n start (int):\n Inclusive start address.\n If ``None``, :attr:`start` is considered.\n\n endex (int):\n Exclusive end address.\n If ``None``, :attr:`endex` is considered.\n\n bound (bool):\n Only gaps within blocks are considered; emptiness before and\n after global data bounds are ignored.\n\n Yields:\n couple of addresses: Block data interval boundaries.\n\n Example:\n +---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|\n +===+===+===+===+===+===+===+===+===+===+===+\n | |[A | B]| | |[x]| |[1 | 2 | 3]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[1, b'AB'], [5, b'x'], [7, b'123']])\n >>> list(memory.gaps())\n [(None, 1), (3, 5), (6, 7), (10, None)]\n >>> list(memory.gaps(bound=True))\n [(3, 5), (6, 7)]\n >>> list(memory.gaps(2, 6))\n [(3, 5)]\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_130gaps(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_start = 0; PyObject *__pyx_v_endex = 0; PyObject *__pyx_v_bound = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("gaps (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_start,&__pyx_n_s_endex,&__pyx_n_s_bound,0}; PyObject* values[3] = {0,0,0}; /* … */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject 
*__pyx_pf_10bytesparse_2_c_6Memory_129gaps(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_start, PyObject *__pyx_v_endex, PyObject *__pyx_v_bound) { struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_10_gaps *__pyx_cur_scope; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("gaps", 0); __pyx_cur_scope = (struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_10_gaps *)__pyx_tp_new_10bytesparse_2_c___pyx_scope_struct_10_gaps(__pyx_ptype_10bytesparse_2_c___pyx_scope_struct_10_gaps, __pyx_empty_tuple, NULL); if (unlikely(!__pyx_cur_scope)) { __pyx_cur_scope = ((struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_10_gaps *)Py_None); __Pyx_INCREF(Py_None); __PYX_ERR(0, 7447, __pyx_L1_error) } else { __Pyx_GOTREF(__pyx_cur_scope); } __pyx_cur_scope->__pyx_v_self = __pyx_v_self; __Pyx_INCREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __Pyx_GIVEREF((PyObject *)__pyx_cur_scope->__pyx_v_self); __pyx_cur_scope->__pyx_v_start = __pyx_v_start; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); __pyx_cur_scope->__pyx_v_endex = __pyx_v_endex; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); __pyx_cur_scope->__pyx_v_bound = __pyx_v_bound; __Pyx_INCREF(__pyx_cur_scope->__pyx_v_bound); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_bound); { __pyx_CoroutineObject *gen = __Pyx_Generator_New((__pyx_coroutine_body_t) __pyx_gb_10bytesparse_2_c_6Memory_131generator8, NULL, (PyObject *) __pyx_cur_scope, __pyx_n_s_gaps, __pyx_n_s_Memory_gaps, __pyx_n_s_bytesparse__c); if (unlikely(!gen)) __PYX_ERR(0, 7447, __pyx_L1_error) __Pyx_DECREF(__pyx_cur_scope); __Pyx_RefNannyFinishContext(); return (PyObject *) gen; } /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("bytesparse._c.Memory.gaps", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_DECREF(((PyObject *)__pyx_cur_scope)); __Pyx_XGIVEREF(__pyx_r); 
__Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_gb_10bytesparse_2_c_6Memory_131generator8(__pyx_CoroutineObject *__pyx_generator, CYTHON_UNUSED PyThreadState *__pyx_tstate, PyObject *__pyx_sent_value) /* generator body */ { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("gaps", 0); __pyx_L3_first_run:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7447, __pyx_L1_error) /* … */ /* function exit code */ PyErr_SetNone(PyExc_StopIteration); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("gaps", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_L0:; __Pyx_XDECREF(__pyx_r); __pyx_r = 0; #if !CYTHON_USE_EXC_INFO_STACK __Pyx_Coroutine_ResetAndClearException(__pyx_generator); #endif __pyx_generator->resume_label = -1; __Pyx_Coroutine_clear((PyObject*)__pyx_generator); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ struct __pyx_obj_10bytesparse_2_c___pyx_scope_struct_10_gaps { PyObject_HEAD Block_ const *__pyx_v_block; size_t __pyx_v_block_count; addr_t __pyx_v_block_endex; size_t __pyx_v_block_index; size_t __pyx_v_block_index_endex; size_t __pyx_v_block_index_start; addr_t __pyx_v_block_start; Rack_ const *__pyx_v_blocks; PyObject *__pyx_v_bound; int __pyx_v_bound_; PyObject *__pyx_v_endex; addr_t __pyx_v_endex_; PyObject *__pyx_v_endex__; struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self; PyObject *__pyx_v_start; addr_t __pyx_v_start_; PyObject *__pyx_v_start__; size_t __pyx_t_0; size_t __pyx_t_1; size_t __pyx_t_2; };
7448: self: 'Memory',
+7449: start: Optional[Address] = None,
values[0] = ((PyObject *)Py_None);
+7450: endex: Optional[Address] = None,
values[1] = ((PyObject *)Py_None);
+7451: bound: bool = False,
values[2] = ((PyObject *)Py_False); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start); if (value) { values[0] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 1: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_endex); if (value) { values[1] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_bound); if (value) { values[2] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "gaps") < 0)) __PYX_ERR(0, 7447, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_start = values[0]; __pyx_v_endex = values[1]; __pyx_v_bound = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("gaps", 0, 0, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7447, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory.gaps", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_129gaps(((struct 
__pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_start, __pyx_v_endex, __pyx_v_bound);
7452: ) -> Iterator[OpenInterval]:
7453: r"""Iterates over block gaps.
7454:
7455: Iterates over gaps emptiness bounds within an address range.
7456: If a yielded bound is ``None``, that direction is infinitely empty
7457: (valid before or after global data bounds).
7458:
7459: Arguments:
7460: start (int):
7461: Inclusive start address.
7462: If ``None``, :attr:`start` is considered.
7463:
7464: endex (int):
7465: Exclusive end address.
7466: If ``None``, :attr:`endex` is considered.
7467:
7468: bound (bool):
7469: Only gaps within blocks are considered; emptiness before and
7470: after global data bounds are ignored.
7471:
7472: Yields:
7473: couple of addresses: Block data interval boundaries.
7474:
7475: Example:
7476: +---+---+---+---+---+---+---+---+---+---+---+
7477: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|
7478: +===+===+===+===+===+===+===+===+===+===+===+
7479: | |[A | B]| | |[x]| |[1 | 2 | 3]| |
7480: +---+---+---+---+---+---+---+---+---+---+---+
7481:
7482: >>> memory = Memory(blocks=[[1, b'AB'], [5, b'x'], [7, b'123']])
7483: >>> list(memory.gaps())
7484: [(None, 1), (3, 5), (6, 7), (10, None)]
7485: >>> list(memory.gaps(bound=True))
7486: [(3, 5), (6, 7)]
7487: >>> list(memory.gaps(2, 6))
7488: [(3, 5)]
7489: """
7490: cdef:
7491: addr_t start_
7492: addr_t endex_
+7493: bint bound_ = <bint>bound
__pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_cur_scope->__pyx_v_bound); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 7493, __pyx_L1_error) __pyx_cur_scope->__pyx_v_bound_ = __pyx_t_1;
+7494: const Rack_* blocks = self._
__pyx_t_2 = __pyx_cur_scope->__pyx_v_self->_; __pyx_cur_scope->__pyx_v_blocks = __pyx_t_2;
+7495: size_t block_count = Rack_Length(blocks)
__pyx_cur_scope->__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_cur_scope->__pyx_v_blocks);
7496: size_t block_index
7497: size_t block_index_start
7498: size_t block_index_endex
7499: const Block_* block
7500: addr_t block_start
7501: addr_t block_endex
7502:
+7503: if block_count:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_block_count != 0); if (__pyx_t_1) { /* … */ goto __pyx_L4; }
+7504: start__ = start
__Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); __pyx_cur_scope->__pyx_v_start__ = __pyx_cur_scope->__pyx_v_start;
+7505: endex__ = endex
__Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); __pyx_cur_scope->__pyx_v_endex__ = __pyx_cur_scope->__pyx_v_endex;
+7506: start_, endex_ = self.bound(start, endex)
__pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_cur_scope->__pyx_v_self), __pyx_n_s_bound); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = NULL; __pyx_t_6 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); __pyx_t_6 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_cur_scope->__pyx_v_start, __pyx_cur_scope->__pyx_v_endex}; __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_3); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_cur_scope->__pyx_v_start, __pyx_cur_scope->__pyx_v_endex}; __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_3); } else #endif { __pyx_t_7 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); if (__pyx_t_5) { __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; } __Pyx_INCREF(__pyx_cur_scope->__pyx_v_start); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_start); PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_cur_scope->__pyx_v_start); __Pyx_INCREF(__pyx_cur_scope->__pyx_v_endex); __Pyx_GIVEREF(__pyx_cur_scope->__pyx_v_endex); PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, __pyx_cur_scope->__pyx_v_endex); __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7506, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { PyObject* sequence = __pyx_t_3; Py_ssize_t size = __Pyx_PySequence_SIZE(sequence); if (unlikely(size != 2)) { if (size > 2) __Pyx_RaiseTooManyValuesError(2); else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); __PYX_ERR(0, 7506, __pyx_L1_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); __pyx_t_7 = PyTuple_GET_ITEM(sequence, 1); } else { __pyx_t_4 = PyList_GET_ITEM(sequence, 0); __pyx_t_7 = PyList_GET_ITEM(sequence, 1); } __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(__pyx_t_7); #else __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_7 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); #endif __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { Py_ssize_t index = -1; __pyx_t_5 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_8 = Py_TYPE(__pyx_t_5)->tp_iternext; index = 0; __pyx_t_4 = __pyx_t_8(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L5_unpacking_failed; __Pyx_GOTREF(__pyx_t_4); index = 1; __pyx_t_7 = __pyx_t_8(__pyx_t_5); if (unlikely(!__pyx_t_7)) goto __pyx_L5_unpacking_failed; __Pyx_GOTREF(__pyx_t_7); if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_5), 2) < 0) __PYX_ERR(0, 7506, __pyx_L1_error) __pyx_t_8 = NULL; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; goto __pyx_L6_unpacking_done; __pyx_L5_unpacking_failed:; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __pyx_t_8 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); __PYX_ERR(0, 7506, __pyx_L1_error) __pyx_L6_unpacking_done:; } __pyx_t_9 
= __Pyx_PyInt_As_uint_fast64_t(__pyx_t_4); if (unlikely((__pyx_t_9 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_10 = __Pyx_PyInt_As_uint_fast64_t(__pyx_t_7); if (unlikely((__pyx_t_10 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7506, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_cur_scope->__pyx_v_start_ = __pyx_t_9; __pyx_cur_scope->__pyx_v_endex_ = __pyx_t_10;
7507:
+7508: if start__ is None:
__pyx_t_1 = (__pyx_cur_scope->__pyx_v_start__ == Py_None); __pyx_t_11 = (__pyx_t_1 != 0); if (__pyx_t_11) { /* … */ goto __pyx_L7; }
+7509: if not bound_:
__pyx_t_11 = ((!(__pyx_cur_scope->__pyx_v_bound_ != 0)) != 0); if (__pyx_t_11) { /* … */ }
+7510: block = Rack_First__(blocks)
__pyx_cur_scope->__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_First__(__pyx_cur_scope->__pyx_v_blocks);
+7511: start_ = Block_Start(block) # override trim start
__pyx_cur_scope->__pyx_v_start_ = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_cur_scope->__pyx_v_block);
+7512: yield None, start_
__pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_start_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7512, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7512, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_7, 0, Py_None); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_7; __pyx_t_7 = 0; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 1; return __pyx_r; __pyx_L9_resume_from_yield:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7512, __pyx_L1_error)
+7513: block_index_start = 0
__pyx_cur_scope->__pyx_v_block_index_start = 0;
7514: else:
+7515: block_index_start = Rack_IndexStart(blocks, start_)
/*else*/ {
__pyx_t_12 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_cur_scope->__pyx_v_blocks, __pyx_cur_scope->__pyx_v_start_); if (unlikely(__pyx_t_12 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 7515, __pyx_L1_error)
__pyx_cur_scope->__pyx_v_block_index_start = __pyx_t_12;
}
__pyx_L7:;
7516:
+7517: if endex__ is None:
__pyx_t_11 = (__pyx_cur_scope->__pyx_v_endex__ == Py_None); __pyx_t_1 = (__pyx_t_11 != 0); if (__pyx_t_1) { /* … */ goto __pyx_L10; }
+7518: block_index_endex = block_count
__pyx_cur_scope->__pyx_v_block_index_endex = __pyx_cur_scope->__pyx_v_block_count;
7519: else:
+7520: block_index_endex = Rack_IndexEndex(blocks, endex_)
/*else*/ {
__pyx_t_12 = __pyx_f_10bytesparse_2_c_Rack_IndexEndex(__pyx_cur_scope->__pyx_v_blocks, __pyx_cur_scope->__pyx_v_endex_); if (unlikely(__pyx_t_12 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 7520, __pyx_L1_error)
__pyx_cur_scope->__pyx_v_block_index_endex = __pyx_t_12;
}
__pyx_L10:;
7521:
+7522: for block_index in range(block_index_start, block_index_endex):
__pyx_t_13 = __pyx_cur_scope->__pyx_v_block_index_endex; __pyx_t_14 = __pyx_t_13; for (__pyx_t_15 = __pyx_cur_scope->__pyx_v_block_index_start; __pyx_t_15 < __pyx_t_14; __pyx_t_15+=1) { __pyx_cur_scope->__pyx_v_block_index = __pyx_t_15;
+7523: block = Rack_Get__(blocks, block_index)
__pyx_cur_scope->__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_cur_scope->__pyx_v_blocks, __pyx_cur_scope->__pyx_v_block_index);
+7524: block_start = Block_Start(block)
__pyx_cur_scope->__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_cur_scope->__pyx_v_block);
+7525: if start_ < block_start:
__pyx_t_1 = ((__pyx_cur_scope->__pyx_v_start_ < __pyx_cur_scope->__pyx_v_block_start) != 0); if (__pyx_t_1) { /* … */ }
+7526: yield start_, block_start
__pyx_t_7 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_start_); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_block_start); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_7); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3); __pyx_t_7 = 0; __pyx_t_3 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; __pyx_cur_scope->__pyx_t_0 = __pyx_t_13; __pyx_cur_scope->__pyx_t_1 = __pyx_t_14; __pyx_cur_scope->__pyx_t_2 = __pyx_t_15; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 2; return __pyx_r; __pyx_L14_resume_from_yield:; __pyx_t_13 = __pyx_cur_scope->__pyx_t_0; __pyx_t_14 = __pyx_cur_scope->__pyx_t_1; __pyx_t_15 = __pyx_cur_scope->__pyx_t_2; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7526, __pyx_L1_error)
+7527: start_ = Block_Endex(block)
__pyx_cur_scope->__pyx_v_start_ = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_cur_scope->__pyx_v_block); }
7528:
+7529: if endex__ is None and not bound_:
__pyx_t_11 = (__pyx_cur_scope->__pyx_v_endex__ == Py_None); __pyx_t_16 = (__pyx_t_11 != 0); if (__pyx_t_16) { } else { __pyx_t_1 = __pyx_t_16; goto __pyx_L16_bool_binop_done; } __pyx_t_16 = ((!(__pyx_cur_scope->__pyx_v_bound_ != 0)) != 0); __pyx_t_1 = __pyx_t_16; __pyx_L16_bool_binop_done:; if (__pyx_t_1) { /* … */ goto __pyx_L15; }
+7530: yield start_, None
__pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_start_); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7530, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7530, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_3, 1, Py_None); __pyx_t_4 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 3; return __pyx_r; __pyx_L18_resume_from_yield:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7530, __pyx_L1_error)
+7531: elif start_ < endex_:
__pyx_t_1 = ((__pyx_cur_scope->__pyx_v_start_ < __pyx_cur_scope->__pyx_v_endex_) != 0); if (__pyx_t_1) { /* … */ } __pyx_L15:;
+7532: yield start_, endex_
__pyx_t_3 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_start_); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7532, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyInt_From_uint_fast64_t(__pyx_cur_scope->__pyx_v_endex_); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 7532, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7532, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_3); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_4); __pyx_t_3 = 0; __pyx_t_4 = 0; __pyx_r = __pyx_t_7; __pyx_t_7 = 0; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 4; return __pyx_r; __pyx_L19_resume_from_yield:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7532, __pyx_L1_error)
7533:
+7534: elif not bound_:
__pyx_t_1 = ((!(__pyx_cur_scope->__pyx_v_bound_ != 0)) != 0); if (__pyx_t_1) { /* … */ } __pyx_L4:; CYTHON_MAYBE_UNUSED_VAR(__pyx_cur_scope);
+7535: yield None, None
__Pyx_INCREF(__pyx_tuple__28); __pyx_r = __pyx_tuple__28; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); __Pyx_Coroutine_ResetAndClearException(__pyx_generator); /* return from generator, yielding value */ __pyx_generator->resume_label = 5; return __pyx_r; __pyx_L20_resume_from_yield:; if (unlikely(!__pyx_sent_value)) __PYX_ERR(0, 7535, __pyx_L1_error) /* … */ __pyx_tuple__28 = PyTuple_Pack(2, Py_None, Py_None); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(0, 7535, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__28); __Pyx_GIVEREF(__pyx_tuple__28);
7536:
+7537: def equal_span(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_133equal_span(PyObject *__pyx_v_self, PyObject *__pyx_v_address); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_132equal_span[] = "Memory.equal_span(self: u'Memory', address: Address) -> Tuple[Optional[Address], Optional[Address], Optional[Value]]\nSpan of homogeneous data.\n\n It searches for the biggest chunk of data adjacent to the given\n address, with the same value at that address.\n\n If the address is within a gap, its bounds are returned, and its\n value is ``None``.\n\n If the address is before or after any data, bounds are ``None``.\n\n Arguments:\n address (int):\n Reference address.\n\n Returns:\n tuple: Start bound, exclusive end bound, and reference value.\n\n Examples:\n >>> memory = Memory()\n >>> memory.equal_span(0)\n (None, None, None)\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|\n +===+===+===+===+===+===+===+===+===+===+===+\n |[A | B | B | B | C]| | |[C | C | D]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n | 65| 66| 66| 66| 67| | | 67| 67| 68| |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[0, b'ABBBC'], [7, b'CCD']])\n >>> memory.equal_span(2)\n (1, 4, 66)\n >>> memory.equal_span(4)\n (4, 5, 67)\n >>> memory.equal_span(5)\n (5, 7, None)\n >>> memory.equal_span(10)\n (10, None, None)\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_133equal_span(PyObject *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("equal_span (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_132equal_span(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_address)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_132equal_span(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject 
*__pyx_v_address) { Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_count; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; addr_t __pyx_v_address_; addr_t __pyx_v_start; addr_t __pyx_v_endex; size_t __pyx_v_offset; byte_t __pyx_v_value; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("equal_span", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_11); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("bytesparse._c.Memory.equal_span", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
7538: self: 'Memory',
7539: address: Address,
7540: ) -> Tuple[Optional[Address], Optional[Address], Optional[Value]]:
7541: r"""Span of homogeneous data.
7542:
7543: It searches for the biggest chunk of data adjacent to the given
7544: address, with the same value at that address.
7545:
7546: If the address is within a gap, its bounds are returned, and its
7547: value is ``None``.
7548:
7549: If the address is before or after any data, bounds are ``None``.
7550:
7551: Arguments:
7552: address (int):
7553: Reference address.
7554:
7555: Returns:
7556: tuple: Start bound, exclusive end bound, and reference value.
7557:
7558: Examples:
7559: >>> memory = Memory()
7560: >>> memory.equal_span(0)
7561: (None, None, None)
7562:
7563: ~~~
7564:
7565: +---+---+---+---+---+---+---+---+---+---+---+
7566: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|
7567: +===+===+===+===+===+===+===+===+===+===+===+
7568: |[A | B | B | B | C]| | |[C | C | D]| |
7569: +---+---+---+---+---+---+---+---+---+---+---+
7570: | 65| 66| 66| 66| 67| | | 67| 67| 68| |
7571: +---+---+---+---+---+---+---+---+---+---+---+
7572:
7573: >>> memory = Memory(blocks=[[0, b'ABBBC'], [7, b'CCD']])
7574: >>> memory.equal_span(2)
7575: (1, 4, 66)
7576: >>> memory.equal_span(4)
7577: (4, 5, 67)
7578: >>> memory.equal_span(5)
7579: (5, 7, None)
7580: >>> memory.equal_span(10)
7581: (10, None, None)
7582: """
7583: cdef:
+7584: const Rack_* blocks = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_1;
+7585: size_t block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
7586: size_t block_index
7587: size_t block_index_start
7588: size_t block_index_endex
7589: const Block_* block
7590: addr_t block_start
7591: addr_t block_endex
+7592: addr_t address_ = <addr_t>address
__pyx_t_2 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_2 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7592, __pyx_L1_error) __pyx_v_address_ = ((addr_t)__pyx_t_2);
7593: addr_t start
7594: addr_t endex
7595: size_t offset
7596: byte_t value
7597:
+7598: block_index = Rack_IndexStart(blocks, address_)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_address_); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 7598, __pyx_L1_error)
__pyx_v_block_index = __pyx_t_3;
7599:
+7600: if block_index < block_count:
__pyx_t_4 = ((__pyx_v_block_index < __pyx_v_block_count) != 0); if (__pyx_t_4) { /* … */ }
+7601: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+7602: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+7603: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
7604:
+7605: if block_start <= address_ < block_endex:
__pyx_t_4 = (__pyx_v_block_start <= __pyx_v_address_); if (__pyx_t_4) { __pyx_t_4 = (__pyx_v_address_ < __pyx_v_block_endex); } __pyx_t_5 = (__pyx_t_4 != 0); if (__pyx_t_5) { /* … */ }
7606: # Address within a block
+7607: CheckSubAddrU(address_, block_start)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_address_, __pyx_v_block_start); if (unlikely(__pyx_t_6 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7607, __pyx_L1_error)
+7608: CheckAddrToSizeU(address - block_start)
__pyx_t_7 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_start); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7608, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_8 = PyNumber_Subtract(__pyx_v_address, __pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7608, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_t_2 = __Pyx_PyInt_As_uint_fast64_t(__pyx_t_8); if (unlikely((__pyx_t_2 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7608, __pyx_L1_error) __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_t_6 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU(__pyx_t_2); if (unlikely(__pyx_t_6 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7608, __pyx_L1_error)
+7609: offset = <size_t>(address_ - block_start)
__pyx_v_offset = ((size_t)(__pyx_v_address_ - __pyx_v_block_start));
+7610: start = offset
__pyx_v_start = __pyx_v_offset;
+7611: CheckAddAddrU(offset, 1)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_CheckAddAddrU(__pyx_v_offset, 1); if (unlikely(__pyx_t_6 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7611, __pyx_L1_error)
+7612: endex = offset + 1
__pyx_v_endex = (__pyx_v_offset + 1);
+7613: value = Block_Get__(block, offset)
__pyx_v_value = __pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_block, __pyx_v_offset);
7614:
+7615: for start in range(start + 1, 0, -1):
for (__pyx_t_2 = (__pyx_v_start + 1) + 1; __pyx_t_2 > 0 + 1; ) { __pyx_t_2-=1; __pyx_v_start = __pyx_t_2;
+7616: if Block_Get__(block, start - 1) != value:
__pyx_t_5 = ((__pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_block, (__pyx_v_start - 1)) != __pyx_v_value) != 0); if (__pyx_t_5) { /* … */ } } /*else*/ {
+7617: break
goto __pyx_L6_break;
7618: else:
+7619: start = 0
__pyx_v_start = 0; } __pyx_L6_break:;
7620:
+7621: for endex in range(endex, Block_Length(block)):
__pyx_t_9 = __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block); __pyx_t_10 = __pyx_t_9; for (__pyx_t_2 = __pyx_v_endex; __pyx_t_2 < __pyx_t_10; __pyx_t_2+=1) { __pyx_v_endex = __pyx_t_2;
+7622: if Block_Get__(block, endex) != value:
__pyx_t_5 = ((__pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_block, __pyx_v_endex) != __pyx_v_value) != 0); if (__pyx_t_5) { /* … */ } } /*else*/ {
+7623: break
goto __pyx_L9_break;
7624: else:
+7625: endex = Block_Length(block)
__pyx_v_endex = __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block); } __pyx_L9_break:;
7626:
+7627: block_endex = block_start + endex
__pyx_v_block_endex = (__pyx_v_block_start + __pyx_v_endex);
+7628: block_start = block_start + start
__pyx_v_block_start = (__pyx_v_block_start + __pyx_v_start);
+7629: return block_start, block_endex, value # equal data span
__Pyx_XDECREF(__pyx_r); __pyx_t_8 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_start); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7629, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_t_7 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_endex); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7629, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_11 = __Pyx_PyInt_From_byte_t(__pyx_v_value); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 7629, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __pyx_t_12 = PyTuple_New(3); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 7629, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_8); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_12, 1, __pyx_t_7); __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_12, 2, __pyx_t_11); __pyx_t_8 = 0; __pyx_t_7 = 0; __pyx_t_11 = 0; __pyx_r = __pyx_t_12; __pyx_t_12 = 0; goto __pyx_L0;
7630:
+7631: elif block_index:
__pyx_t_5 = (__pyx_v_block_index != 0); if (__pyx_t_5) { /* … */ }
7632: # Address within a gap
+7633: block_endex = block_start # end gap before next block
__pyx_v_block_endex = __pyx_v_block_start;
+7634: block = Rack_Get__(blocks, block_index - 1)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, (__pyx_v_block_index - 1));
+7635: block_start = Block_Endex(block) # start gap after previous block
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+7636: return block_start, block_endex, None # gap span
__Pyx_XDECREF(__pyx_r); __pyx_t_12 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_start); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 7636, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_12); __pyx_t_11 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_endex); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 7636, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __pyx_t_7 = PyTuple_New(3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7636, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_GIVEREF(__pyx_t_12); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_12); __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_11); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_7, 2, Py_None); __pyx_t_12 = 0; __pyx_t_11 = 0; __pyx_r = __pyx_t_7; __pyx_t_7 = 0; goto __pyx_L0;
7637:
7638: else:
7639: # Address before content
+7640: return None, block_start, None # open left
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_7 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_start); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7640, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_11 = PyTuple_New(3); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 7640, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_11, 0, Py_None); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_11, 1, __pyx_t_7); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_11, 2, Py_None); __pyx_t_7 = 0; __pyx_r = __pyx_t_11; __pyx_t_11 = 0; goto __pyx_L0; }
7641:
7642: else:
7643: # Address after content
+7644: if block_count:
/*else*/ { __pyx_t_5 = (__pyx_v_block_count != 0); if (__pyx_t_5) { /* … */ }
+7645: block = Rack_Last__(blocks)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_blocks);
+7646: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+7647: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+7648: return block_endex, None, None # open right
__Pyx_XDECREF(__pyx_r); __pyx_t_11 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_endex); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 7648, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __pyx_t_7 = PyTuple_New(3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7648, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_11); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_7, 1, Py_None); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_7, 2, Py_None); __pyx_t_11 = 0; __pyx_r = __pyx_t_7; __pyx_t_7 = 0; goto __pyx_L0;
7649:
7650: else:
+7651: return None, None, None # fully open
/*else*/ { __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_tuple__29); __pyx_r = __pyx_tuple__29; goto __pyx_L0; } } /* … */ __pyx_tuple__29 = PyTuple_Pack(3, Py_None, Py_None, Py_None); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(0, 7651, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__29); __Pyx_GIVEREF(__pyx_tuple__29);
7652:
+7653: def block_span(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_135block_span(PyObject *__pyx_v_self, PyObject *__pyx_v_address); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_134block_span[] = "Memory.block_span(self: u'Memory', address: Address) -> Tuple[Optional[Address], Optional[Address], Optional[Value]]\nSpan of block data.\n\n It searches for the biggest chunk of data adjacent to the given\n address.\n\n If the address is within a gap, its bounds are returned, and its\n value is ``None``.\n\n If the address is before or after any data, bounds are ``None``.\n\n Arguments:\n address (int):\n Reference address.\n\n Returns:\n tuple: Start bound, exclusive end bound, and reference value.\n\n Examples:\n >>> memory = Memory()\n >>> memory.block_span(0)\n (None, None, None)\n\n ~~~\n\n +---+---+---+---+---+---+---+---+---+---+---+\n | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|\n +===+===+===+===+===+===+===+===+===+===+===+\n |[A | B | B | B | C]| | |[C | C | D]| |\n +---+---+---+---+---+---+---+---+---+---+---+\n | 65| 66| 66| 66| 67| | | 67| 67| 68| |\n +---+---+---+---+---+---+---+---+---+---+---+\n\n >>> memory = Memory(blocks=[[0, b'ABBBC'], [7, b'CCD']])\n >>> memory.block_span(2)\n (0, 5, 66)\n >>> memory.block_span(4)\n (0, 5, 67)\n >>> memory.block_span(5)\n (5, 7, None)\n >>> memory.block_span(10)\n (10, None, None)\n "; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_135block_span(PyObject *__pyx_v_self, PyObject *__pyx_v_address) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("block_span (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_134block_span(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), ((PyObject *)__pyx_v_address)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_134block_span(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_address) { addr_t __pyx_v_address_; 
Rack_ const *__pyx_v_blocks; size_t __pyx_v_block_count; size_t __pyx_v_block_index; Block_ const *__pyx_v_block; addr_t __pyx_v_block_start; addr_t __pyx_v_block_endex; byte_t __pyx_v_value; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("block_span", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("bytesparse._c.Memory.block_span", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
7654: self: 'Memory',
7655: address: Address,
7656: ) -> Tuple[Optional[Address], Optional[Address], Optional[Value]]:
7657: r"""Span of block data.
7658:
7659: It searches for the biggest chunk of data adjacent to the given
7660: address.
7661:
7662: If the address is within a gap, its bounds are returned, and its
7663: value is ``None``.
7664:
7665: If the address is before or after any data, bounds are ``None``.
7666:
7667: Arguments:
7668: address (int):
7669: Reference address.
7670:
7671: Returns:
7672: tuple: Start bound, exclusive end bound, and reference value.
7673:
7674: Examples:
7675: >>> memory = Memory()
7676: >>> memory.block_span(0)
7677: (None, None, None)
7678:
7679: ~~~
7680:
7681: +---+---+---+---+---+---+---+---+---+---+---+
7682: | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10|
7683: +===+===+===+===+===+===+===+===+===+===+===+
7684: |[A | B | B | B | C]| | |[C | C | D]| |
7685: +---+---+---+---+---+---+---+---+---+---+---+
7686: | 65| 66| 66| 66| 67| | | 67| 67| 68| |
7687: +---+---+---+---+---+---+---+---+---+---+---+
7688:
7689: >>> memory = Memory(blocks=[[0, b'ABBBC'], [7, b'CCD']])
7690: >>> memory.block_span(2)
7691: (0, 5, 66)
7692: >>> memory.block_span(4)
7693: (0, 5, 67)
7694: >>> memory.block_span(5)
7695: (5, 7, None)
7696: >>> memory.block_span(10)
7697: (10, None, None)
7698: """
7699: cdef:
+7700: addr_t address_ = <addr_t>address
__pyx_t_1 = __Pyx_PyInt_As_uint_fast64_t(__pyx_v_address); if (unlikely((__pyx_t_1 == ((addr_t)-1)) && PyErr_Occurred())) __PYX_ERR(0, 7700, __pyx_L1_error) __pyx_v_address_ = ((addr_t)__pyx_t_1);
+7701: const Rack_* blocks = self._
__pyx_t_2 = __pyx_v_self->_; __pyx_v_blocks = __pyx_t_2;
+7702: size_t block_count = Rack_Length(blocks)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks);
7703: size_t block_index
7704: const Block_* block
7705: addr_t block_start
7706: addr_t block_endex
7707: byte_t value
7708:
+7709: block_index = Rack_IndexStart(blocks, address_)
__pyx_t_3 = __pyx_f_10bytesparse_2_c_Rack_IndexStart(__pyx_v_blocks, __pyx_v_address_); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-2L))) __PYX_ERR(0, 7709, __pyx_L1_error)
__pyx_v_block_index = __pyx_t_3;
7710:
+7711: if block_index < block_count:
__pyx_t_4 = ((__pyx_v_block_index < __pyx_v_block_count) != 0); if (__pyx_t_4) { /* … */ }
+7712: block = Rack_Get__(blocks, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, __pyx_v_block_index);
+7713: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+7714: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
7715:
+7716: if block_start <= address_ < block_endex:
__pyx_t_4 = (__pyx_v_block_start <= __pyx_v_address_); if (__pyx_t_4) { __pyx_t_4 = (__pyx_v_address_ < __pyx_v_block_endex); } __pyx_t_5 = (__pyx_t_4 != 0); if (__pyx_t_5) { /* … */ }
7717: # Address within a block
+7718: CheckSubAddrU(address_, block_start)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_CheckSubAddrU(__pyx_v_address_, __pyx_v_block_start); if (unlikely(__pyx_t_6 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7718, __pyx_L1_error)
+7719: CheckAddrToSizeU(address_ - block_start)
__pyx_t_6 = __pyx_f_10bytesparse_2_c_CheckAddrToSizeU((__pyx_v_address_ - __pyx_v_block_start)); if (unlikely(__pyx_t_6 == ((__pyx_t_10bytesparse_2_c_vint)-1))) __PYX_ERR(0, 7719, __pyx_L1_error)
+7720: value = Block_Get__(block, <size_t>(address_ - block_start))
__pyx_v_value = __pyx_f_10bytesparse_2_c_Block_Get__(__pyx_v_block, ((size_t)(__pyx_v_address_ - __pyx_v_block_start)));
+7721: return block_start, block_endex, value # block span
__Pyx_XDECREF(__pyx_r); __pyx_t_7 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_start); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7721, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_8 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_endex); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7721, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_t_9 = __Pyx_PyInt_From_byte_t(__pyx_v_value); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 7721, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_10 = PyTuple_New(3); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 7721, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_7); __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 1, __pyx_t_8); __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_10, 2, __pyx_t_9); __pyx_t_7 = 0; __pyx_t_8 = 0; __pyx_t_9 = 0; __pyx_r = __pyx_t_10; __pyx_t_10 = 0; goto __pyx_L0;
7722:
+7723: elif block_index:
__pyx_t_5 = (__pyx_v_block_index != 0); if (__pyx_t_5) { /* … */ }
7724: # Address within a gap
+7725: block_endex = block_start # end gap before next block
__pyx_v_block_endex = __pyx_v_block_start;
+7726: block = Rack_Get__(blocks, block_index - 1)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks, (__pyx_v_block_index - 1));
+7727: block_start = Block_Endex(block) # start gap after previous block
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+7728: return block_start, block_endex, None # gap span
__Pyx_XDECREF(__pyx_r); __pyx_t_10 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_start); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 7728, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __pyx_t_9 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_endex); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 7728, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_8 = PyTuple_New(3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7728, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_10); __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_9); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_8, 2, Py_None); __pyx_t_10 = 0; __pyx_t_9 = 0; __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
7729:
7730: else:
7731: # Address before content
+7732: return None, block_start, None # open left
/*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_8 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_start); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7732, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_t_9 = PyTuple_New(3); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 7732, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_9, 0, Py_None); __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_9, 1, __pyx_t_8); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_9, 2, Py_None); __pyx_t_8 = 0; __pyx_r = __pyx_t_9; __pyx_t_9 = 0; goto __pyx_L0; }
7733:
7734: else:
7735: # Address after content
+7736: if block_count:
/*else*/ { __pyx_t_5 = (__pyx_v_block_count != 0); if (__pyx_t_5) { /* … */ }
+7737: block = Rack_Last__(blocks)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Last__(__pyx_v_blocks);
+7738: block_start = Block_Start(block)
__pyx_v_block_start = __pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block);
+7739: block_endex = Block_Endex(block)
__pyx_v_block_endex = __pyx_f_10bytesparse_2_c_Block_Endex(__pyx_v_block);
+7740: return block_endex, None, None # open right
__Pyx_XDECREF(__pyx_r); __pyx_t_9 = __Pyx_PyInt_From_uint_fast64_t(__pyx_v_block_endex); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 7740, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_8 = PyTuple_New(3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7740, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_9); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_8, 1, Py_None); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_8, 2, Py_None); __pyx_t_9 = 0; __pyx_r = __pyx_t_8; __pyx_t_8 = 0; goto __pyx_L0;
7741:
7742: else:
+7743: return None, None, None # fully open
/*else*/ { __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_tuple__29); __pyx_r = __pyx_tuple__29; goto __pyx_L0; } }
7744:
+7745: def _to_blocks(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_137_to_blocks(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_10bytesparse_2_c_6Memory_136_to_blocks[] = "Memory._to_blocks(self: u'Memory', size_max: Optional[Address] = STR_MAX_CONTENT_SIZE) -> BlockList\nConverts into a list of blocks."; static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_137_to_blocks(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_size_max = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_to_blocks (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_size_max,0}; PyObject* values[1] = {0}; values[0] = __pyx_k__30; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_size_max); if (value) { values[0] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_to_blocks") < 0)) __PYX_ERR(0, 7745, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_size_max = values[0]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_to_blocks", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 7745, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("bytesparse._c.Memory._to_blocks", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = 
__pyx_pf_10bytesparse_2_c_6Memory_136_to_blocks(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self), __pyx_v_size_max); int __pyx_lineno = 0; const char *__pyx_filename = NULL; int __pyx_clineno = 0; /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_136_to_blocks(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self, PyObject *__pyx_v_size_max) { Rack_ const *__pyx_v_blocks1; size_t __pyx_v_block_count; size_t __pyx_v_block_index; Block_ *__pyx_v_block; size_t __pyx_v_size; __Pyx_memviewslice __pyx_v_view = { 0, 0, { 0 }, { 0 }, { 0 } }; PyObject *__pyx_v_blocks2 = 0; PyObject *__pyx_v_data = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_to_blocks", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(((PyObject *)__pyx_t_7)); __Pyx_XDECREF(__pyx_t_8); __PYX_XDEC_MEMVIEW(&__pyx_t_9, 1); __Pyx_XDECREF(__pyx_t_13); __Pyx_AddTraceback("bytesparse._c.Memory._to_blocks", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __Pyx_XDECREF(__pyx_v_blocks2); __Pyx_XDECREF(__pyx_v_data); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
7746: self: 'Memory',
+7747: size_max: Optional[Address] = STR_MAX_CONTENT_SIZE,
__Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_STR_MAX_CONTENT_SIZE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7747, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_k__30 = __pyx_t_1; __Pyx_GIVEREF(__pyx_t_1); __pyx_t_1 = 0;
7748: ) -> BlockList:
7749: r"""Converts into a list of blocks."""
7750: cdef:
+7751: const Rack_* blocks1 = self._
__pyx_t_1 = __pyx_v_self->_; __pyx_v_blocks1 = __pyx_t_1;
+7752: size_t block_count = Rack_Length(blocks1)
__pyx_v_block_count = __pyx_f_10bytesparse_2_c_Rack_Length(__pyx_v_blocks1);
7753: size_t block_index
+7754: Block_* block = NULL
__pyx_v_block = NULL;
7755: size_t size
7756: const byte_t[:] view
+7757: list blocks2 = []
__pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7757, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v_blocks2 = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0;
7758:
+7759: for block_index in range(block_count):
__pyx_t_3 = __pyx_v_block_count; __pyx_t_4 = __pyx_t_3; for (__pyx_t_5 = 0; __pyx_t_5 < __pyx_t_4; __pyx_t_5+=1) { __pyx_v_block_index = __pyx_t_5;
+7760: block = Rack_Get__(blocks1, block_index)
__pyx_v_block = __pyx_f_10bytesparse_2_c_Rack_Get__(__pyx_v_blocks1, __pyx_v_block_index);
+7761: size = Block_Length(block)
__pyx_v_size = __pyx_f_10bytesparse_2_c_Block_Length(__pyx_v_block);
+7762: view = <const byte_t[:size]><const byte_t*>Block_At__(block, 0)
__pyx_t_6 = ((byte_t const *)__pyx_f_10bytesparse_2_c_Block_At__(__pyx_v_block, 0)); if (!__pyx_t_6) { PyErr_SetString(PyExc_ValueError,"Cannot create cython.array from NULL pointer"); __PYX_ERR(0, 7762, __pyx_L1_error) } __pyx_t_8 = __pyx_format_from_typeinfo(&__Pyx_TypeInfo_nn_byte_t__const__); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7762, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_t_2 = Py_BuildValue((char*) "(" __PYX_BUILD_PY_SSIZE_T ")", ((Py_ssize_t)__pyx_v_size)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7762, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_7 = __pyx_array_new(__pyx_t_2, sizeof(byte_t const ), PyBytes_AS_STRING(__pyx_t_8), (char *) "c", (char *) __pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 7762, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __pyx_t_9 = __Pyx_PyObject_to_MemoryviewSlice_ds_nn_byte_t__const__(((PyObject *)__pyx_t_7), 0); if (unlikely(!__pyx_t_9.memview)) __PYX_ERR(0, 7762, __pyx_L1_error) __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; __PYX_XDEC_MEMVIEW(&__pyx_v_view, 1); __pyx_v_view = __pyx_t_9; __pyx_t_9.memview = NULL; __pyx_t_9.data = NULL;
+7763: data = bytearray(view) if size_max is None or size < size_max else view
__pyx_t_11 = (__pyx_v_size_max == Py_None); __pyx_t_12 = (__pyx_t_11 != 0); if (!__pyx_t_12) { } else { __pyx_t_10 = __pyx_t_12; goto __pyx_L5_bool_binop_done; } __pyx_t_2 = __Pyx_PyInt_FromSize_t(__pyx_v_size); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7763, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_13 = PyObject_RichCompare(__pyx_t_2, __pyx_v_size_max, Py_LT); __Pyx_XGOTREF(__pyx_t_13); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 7763, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_13); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 7763, __pyx_L1_error) __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __pyx_t_10 = __pyx_t_12; __pyx_L5_bool_binop_done:; if (__pyx_t_10) { __pyx_t_13 = __pyx_memoryview_fromslice(__pyx_v_view, 1, (PyObject *(*)(char *)) __pyx_memview_get_nn_byte_t__const__, (int (*)(char *, PyObject *)) NULL, 0);; if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 7763, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_13); __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_t_13); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7763, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __pyx_t_8 = __pyx_t_2; __pyx_t_2 = 0; } else { __pyx_t_2 = __pyx_memoryview_fromslice(__pyx_v_view, 1, (PyObject *(*)(char *)) __pyx_memview_get_nn_byte_t__const__, (int (*)(char *, PyObject *)) NULL, 0);; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7763, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_8 = __pyx_t_2; __pyx_t_2 = 0; } __Pyx_XDECREF_SET(__pyx_v_data, __pyx_t_8); __pyx_t_8 = 0;
+7764: blocks2.append([Block_Start(block), data])
__pyx_t_8 = __Pyx_PyInt_From_uint_fast64_t(__pyx_f_10bytesparse_2_c_Block_Start(__pyx_v_block)); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 7764, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_8); __pyx_t_2 = PyList_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7764, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_GIVEREF(__pyx_t_8); PyList_SET_ITEM(__pyx_t_2, 0, __pyx_t_8); __Pyx_INCREF(__pyx_v_data); __Pyx_GIVEREF(__pyx_v_data); PyList_SET_ITEM(__pyx_t_2, 1, __pyx_v_data); __pyx_t_8 = 0; __pyx_t_14 = __Pyx_PyList_Append(__pyx_v_blocks2, __pyx_t_2); if (unlikely(__pyx_t_14 == ((int)-1))) __PYX_ERR(0, 7764, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; }
+7765: return blocks2
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_blocks2); __pyx_r = __pyx_v_blocks2; goto __pyx_L0;
7766:
7767: @property
+7768: def _blocks(
/* Python wrapper */ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_7_blocks_1__get__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_10bytesparse_2_c_6Memory_7_blocks_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_10bytesparse_2_c_6Memory_7_blocks___get__(((struct __pyx_obj_10bytesparse_2_c_Memory *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_10bytesparse_2_c_6Memory_7_blocks___get__(struct __pyx_obj_10bytesparse_2_c_Memory *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("bytesparse._c.Memory._blocks.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; }
7769: self: 'Memory',
7770: ) -> BlockList:
7771: r"""list of blocks: A sequence of spaced blocks, sorted by address."""
7772:
+7773: return self._to_blocks(size_max=None)
__Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_to_blocks); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7773, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7773, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_t_2, __pyx_n_s_size_max, Py_None) < 0) __PYX_ERR(0, 7773, __pyx_L1_error) __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_empty_tuple, __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 7773, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0;