author    Eli Zaretskii  2018-06-29 16:55:20 +0300
committer Eli Zaretskii  2018-06-29 16:55:20 +0300
commit    eec71ebdb50c3110bb747db57c7d7f04b6d14ad1 (patch)
tree      78f81836140a0b47e8875e86623c1a091291110e /src
parent    93c41ce6aa64b14fc9bd7bdd0d909915a79191cd (diff)
Speed up replace-buffer-contents
* src/editfns.c (EXTRA_CONTEXT_FIELDS): Add a_unibyte and b_unibyte
members.
(rbc_quitcounter): New static variable.
(Freplace_buffer_contents): Initialize a_unibyte, b_unibyte, and
rbc_quitcounter.  Inhibit modification hooks if they were not already
inhibited.  Use rarely_quit to allow the user to quit, to avoid
calling maybe_quit too frequently (which hurts performance).  Remove
redundant assertions (which hurt performance too much).  Call
signal_after_change and update_compositions after all the changes are
done.
(buffer_chars_equal): Remove redundant assertions (which hurt
performance).  Avoid using BUF_FETCH_CHAR_AS_MULTIBYTE, which hurts
performance by referencing Lisp symbols; instead, use lower-level
macros with explicit tests to select which macro to use.
(Bug#31888)
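The two hot-path changes described above share one idea: hoist per-iteration
overhead out of the innermost loops.  As a minimal, self-contained sketch of
the quit-counter pattern (the names quitcounter, expensive_check, and
rarely_check are invented for this illustration and are not Emacs APIs; only
rarely_quit and maybe_quit are real), a wrapping unsigned short counter lets
the expensive check run roughly once per 65536 iterations instead of on
every pass:

#include <stdio.h>

/* Wraps to 0 every 65536 increments, like rbc_quitcounter below.  */
static unsigned short quitcounter;

/* Stand-in for an expensive per-iteration check such as maybe_quit.  */
static void
expensive_check (void)
{
  puts ("checking for a pending quit...");
}

/* Mirrors the shape of rarely_quit: do the expensive work only when
   the wrapping counter has just returned to zero.  */
static void
rarely_check (unsigned short guard)
{
  if (!guard)
    expensive_check ();
}

int
main (void)
{
  /* Instead of paying for expensive_check () on every iteration,
     pay for it roughly once per 65536 iterations.  */
  for (long i = 0; i < 200000; i++)
    rarely_check (++quitcounter);
  return 0;
}

The diff below applies the same pattern, using rbc_quitcounter and
rarely_quit in Freplace_buffer_contents and buffer_chars_equal.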
Diffstat (limited to 'src')
-rw-r--r--	src/editfns.c	83
1 file changed, 58 insertions, 25 deletions
diff --git a/src/editfns.c b/src/editfns.c
index 4fba68692bf..4d3c838d2ff 100644
--- a/src/editfns.c
+++ b/src/editfns.c
@@ -3115,6 +3115,9 @@ determines whether case is significant or ignored. */)
 #undef ELEMENT
 #undef EQUAL
 
+/* Counter used to rarely_quit in replace-buffer-contents.  */
+static unsigned short rbc_quitcounter;
+
 #define XVECREF_YVECREF_EQUAL(ctx, xoff, yoff)  \
   buffer_chars_equal ((ctx), (xoff), (yoff))
 
@@ -3124,6 +3127,9 @@ determines whether case is significant or ignored. */)
   /* Buffers to compare.  */			\
   struct buffer *buffer_a;			\
   struct buffer *buffer_b;			\
+  /* Whether each buffer is unibyte/plain-ASCII or not.  */	\
+  bool a_unibyte;				\
+  bool b_unibyte;				\
   /* Bit vectors recording for each character whether it was deleted
      or inserted.  */				\
   unsigned char *deletions;			\
@@ -3202,6 +3208,8 @@ differences between the two buffers. */)
   struct context ctx = {
     .buffer_a = a,
     .buffer_b = b,
+    .a_unibyte = BUF_ZV (a) == BUF_ZV_BYTE (a),
+    .b_unibyte = BUF_ZV (b) == BUF_ZV_BYTE (b),
     .deletions = SAFE_ALLOCA (del_bytes),
     .insertions = SAFE_ALLOCA (ins_bytes),
     .fdiag = buffer + size_b + 1,
@@ -3218,10 +3226,25 @@ differences between the two buffers. */)
      early.  */
   eassert (! early_abort);
 
+  rbc_quitcounter = 0;
+
   Fundo_boundary ();
+  bool modification_hooks_inhibited = false;
   ptrdiff_t count = SPECPDL_INDEX ();
   record_unwind_protect (save_excursion_restore, save_excursion_save ());
 
+  /* We are going to make a lot of small modifications, and having the
+     modification hooks called for each of them will slow us down.
+     Instead, we announce a single modification for the entire
+     modified region.  But don't do that if the caller inhibited
+     modification hooks, because then they don't want that.  */
+  if (!inhibit_modification_hooks)
+    {
+      prepare_to_modify_buffer (BEGV, ZV, NULL);
+      specbind (Qinhibit_modification_hooks, Qt);
+      modification_hooks_inhibited = true;
+    }
+
   ptrdiff_t i = size_a;
   ptrdiff_t j = size_b;
   /* Walk backwards through the lists of changes.  This was also
@@ -3230,15 +3253,13 @@ differences between the two buffers. */)
   while (i >= 0 || j >= 0)
     {
       /* Allow the user to quit if this gets too slow.  */
-      maybe_quit ();
+      rarely_quit (++rbc_quitcounter);
 
       /* Check whether there is a change (insertion or deletion)
          before the current position.  */
       if ((i > 0 && bit_is_set (ctx.deletions, i - 1)) ||
           (j > 0 && bit_is_set (ctx.insertions, j - 1)))
         {
-          maybe_quit ();
-
           ptrdiff_t end_a = min_a + i;
           ptrdiff_t end_b = min_b + j;
           /* Find the beginning of the current change run.  */
@@ -3246,14 +3267,13 @@ differences between the two buffers. */)
             --i;
           while (j > 0 && bit_is_set (ctx.insertions, j - 1))
             --j;
+
+          rarely_quit (rbc_quitcounter++);
+
           ptrdiff_t beg_a = min_a + i;
           ptrdiff_t beg_b = min_b + j;
-          eassert (beg_a >= BEGV);
-          eassert (beg_b >= BUF_BEGV (b));
           eassert (beg_a <= end_a);
           eassert (beg_b <= end_b);
-          eassert (end_a <= ZV);
-          eassert (end_b <= BUF_ZV (b));
           eassert (beg_a < end_a || beg_b < end_b);
           if (beg_a < end_a)
             del_range (beg_a, end_a);
@@ -3269,6 +3289,13 @@ differences between the two buffers. */)
     }
   unbind_to (count, Qnil);
   SAFE_FREE ();
+  rbc_quitcounter = 0;
+
+  if (modification_hooks_inhibited)
+    {
+      signal_after_change (BEGV, size_a, ZV - BEGV);
+      update_compositions (BEGV, ZV, CHECK_BORDER);
+    }
 
   return Qnil;
 }
@@ -3296,39 +3323,45 @@ bit_is_set (const unsigned char *a, ptrdiff_t i)
 /* Return true if the characters at position POS_A of buffer
    CTX->buffer_a and at position POS_B of buffer CTX->buffer_b are
    equal.  POS_A and POS_B are zero-based.  Text properties are
-   ignored.  */
+   ignored.
+
+   Implementation note: this function is called inside the inner-most
+   loops of compareseq, so it absolutely must be optimized for speed,
+   every last bit of it.  E.g., each additional use of BEGV or such
+   likes will slow down replace-buffer-contents by dozens of percents,
+   because builtin_lisp_symbol will be called one more time in the
+   innermost loop.  */
 
 static bool
 buffer_chars_equal (struct context *ctx,
                     ptrdiff_t pos_a, ptrdiff_t pos_b)
 {
-  eassert (pos_a >= 0);
   pos_a += BUF_BEGV (ctx->buffer_a);
-  eassert (pos_a >= BUF_BEGV (ctx->buffer_a));
-  eassert (pos_a < BUF_ZV (ctx->buffer_a));
-
-  eassert (pos_b >= 0);
   pos_b += BUF_BEGV (ctx->buffer_b);
-  eassert (pos_b >= BUF_BEGV (ctx->buffer_b));
-  eassert (pos_b < BUF_ZV (ctx->buffer_b));
-
-  bool a_unibyte = BUF_ZV (ctx->buffer_a) == BUF_ZV_BYTE (ctx->buffer_a);
-  bool b_unibyte = BUF_ZV (ctx->buffer_b) == BUF_ZV_BYTE (ctx->buffer_b);
 
   /* Allow the user to escape out of a slow compareseq call.  */
-  maybe_quit ();
+  rarely_quit (++rbc_quitcounter);
 
   ptrdiff_t bpos_a =
-    a_unibyte ? pos_a : buf_charpos_to_bytepos (ctx->buffer_a, pos_a);
+    ctx->a_unibyte ? pos_a : buf_charpos_to_bytepos (ctx->buffer_a, pos_a);
   ptrdiff_t bpos_b =
-    b_unibyte ? pos_b : buf_charpos_to_bytepos (ctx->buffer_b, pos_b);
+    ctx->b_unibyte ? pos_b : buf_charpos_to_bytepos (ctx->buffer_b, pos_b);
 
-  if (a_unibyte && b_unibyte)
+  /* We make the below a series of specific test to avoid using
+     BUF_FETCH_CHAR_AS_MULTIBYTE, which references Lisp symbols, and
+     is therefore significantly slower (see the note in the commentary
+     to this function).  */
+  if (ctx->a_unibyte && ctx->b_unibyte)
     return BUF_FETCH_BYTE (ctx->buffer_a, bpos_a)
       == BUF_FETCH_BYTE (ctx->buffer_b, bpos_b);
-
-  return BUF_FETCH_CHAR_AS_MULTIBYTE (ctx->buffer_a, bpos_a)
-    == BUF_FETCH_CHAR_AS_MULTIBYTE (ctx->buffer_b, bpos_b);
+  if (ctx->a_unibyte && !ctx->b_unibyte)
+    return UNIBYTE_TO_CHAR (BUF_FETCH_BYTE (ctx->buffer_a, bpos_a))
+      == BUF_FETCH_MULTIBYTE_CHAR (ctx->buffer_b, bpos_b);
+  if (!ctx->a_unibyte && ctx->b_unibyte)
+    return BUF_FETCH_MULTIBYTE_CHAR (ctx->buffer_a, bpos_a)
+      == UNIBYTE_TO_CHAR (BUF_FETCH_BYTE (ctx->buffer_b, bpos_b));
+  return BUF_FETCH_MULTIBYTE_CHAR (ctx->buffer_a, bpos_a)
+    == BUF_FETCH_MULTIBYTE_CHAR (ctx->buffer_b, bpos_b);
 }
 
 
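For context on the buffer_chars_equal hunk above: the new a_unibyte and
b_unibyte context fields cache a per-buffer property once, before compareseq
runs, so the per-character hot path only branches on stored booleans instead
of recomputing them (or going through BUF_FETCH_CHAR_AS_MULTIBYTE) on every
call.  Below is a rough, self-contained sketch of that caching idea under
invented names (cmp_context, ascii_only, chars_equal); it is not Emacs code,
just an illustration of the technique:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Comparison context: the ASCII-only flags are computed once, before
   the hot loop, much as a_unibyte/b_unibyte are in the commit.  */
struct cmp_context
{
  const unsigned char *a, *b;
  bool a_ascii_only;
  bool b_ascii_only;
};

static bool
ascii_only (const unsigned char *s, size_t n)
{
  for (size_t i = 0; i < n; i++)
    if (s[i] >= 0x80)
      return false;
  return true;
}

/* Hot path: called once per character pair; it only consults the
   cached flags and never rescans the buffers.  A real implementation
   would decode multibyte sequences in the non-ASCII case.  */
static bool
chars_equal (const struct cmp_context *ctx, size_t i)
{
  if (ctx->a_ascii_only && ctx->b_ascii_only)
    return ctx->a[i] == ctx->b[i];	/* cheap byte comparison */
  return ctx->a[i] == ctx->b[i];	/* placeholder for the slower path */
}

int
main (void)
{
  const unsigned char *x = (const unsigned char *) "replace";
  const unsigned char *y = (const unsigned char *) "replant";
  struct cmp_context ctx = {
    .a = x, .b = y,
    .a_ascii_only = ascii_only (x, strlen ((const char *) x)),
    .b_ascii_only = ascii_only (y, strlen ((const char *) y)),
  };
  for (size_t i = 0; i < strlen ((const char *) x); i++)
    printf ("%zu: %s\n", i, chars_equal (&ctx, i) ? "equal" : "differ");
  return 0;
}

Computing the flags once per comparison keeps the innermost check to a couple
of predictable branches, which is the same trade-off the commit makes.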