Index: vt.h
===================================================================
--- vt.h	(revision 265909)
+++ vt.h	(working copy)
@@ -43,10 +43,15 @@
 #include
 #include
 #include
+#include
 #include
 #include
+//#include
+//#include
 #include
+//#include
+
 #include "opt_syscons.h"
 #include "opt_splash.h"
@@ -154,13 +159,26 @@
  * of a rectangle that needs to be redrawn (vb_dirtyrect). Because this
  * approach seemed to cause suboptimal performance (when the top left
  * and the bottom right of the screen are modified), it also uses a set
- * of bitmasks to keep track of the rows and columns (mod 64) that have
+ * of bitmasks to keep track of the rows and columns (mod 32) that have
  * been modified.
  */
+//#if SIG_ATOMIC_MAX == INT64_MAX
+//#define VBM_MASK_TYPE uint64_t
+//#define VBM_MASK_BITS 64
+//#define VBM_DIRTY UINT64_MAX
+//#else
+//#define VBM_MASK_TYPE uint32_t
+//#define VBM_MASK_BITS 32
+//#define VBM_DIRTY UINT32_MAX
+//#endif
+
+#define VBM_MASK_TYPE unsigned int
+#define VBM_MASK_BITS (sizeof(VBM_MASK_TYPE) * 8)
+#define VBM_DIRTY UINT_MAX
+
 struct vt_bufmask {
-	uint64_t		 vbm_row, vbm_col;
-#define	VBM_DIRTY		UINT64_MAX
+	VBM_MASK_TYPE		 vbm_row, vbm_col;
 };
 
 struct vt_buf {
@@ -227,9 +245,9 @@
 #define	VTBUF_ISCURSOR(vb, r, c) \
 	vtbuf_iscursor((vb), (r), (c))
 #define	VTBUF_DIRTYROW(mask, row) \
-	((mask)->vbm_row & ((uint64_t)1 << ((row) % 64)))
+	((mask)->vbm_row & ((VBM_MASK_TYPE)1 << ((row) % VBM_MASK_BITS)))
 #define	VTBUF_DIRTYCOL(mask, col) \
-	((mask)->vbm_col & ((uint64_t)1 << ((col) % 64)))
+	((mask)->vbm_col & ((VBM_MASK_TYPE)1 << ((col) % VBM_MASK_BITS)))
 #define	VTBUF_SPACE_CHAR	(' ' | TC_WHITE << 26 | TC_BLACK << 29)
 
 #define	VHS_SET	0
Index: vt_buf.c
===================================================================
--- vt_buf.c	(revision 265909)
+++ vt_buf.c	(working copy)
@@ -194,31 +194,31 @@
 	return (0);
 }
 
-static inline uint64_t
+static inline VBM_MASK_TYPE
 vtbuf_dirty_axis(unsigned int begin, unsigned int end)
 {
-	uint64_t left, right, mask;
+	VBM_MASK_TYPE left, right, mask;
 
 	/*
-	 * Mark all bits between begin % 64 and end % 64 dirty.
-	 * This code is functionally equivalent to:
+	 * Mark all bits between begin % VBM_MASK_BITS and end % VBM_MASK_BITS
+	 * dirty. This code is functionally equivalent to:
 	 *
 	 * for (i = begin; i < end; i++)
-	 * 	mask |= (uint64_t)1 << (i % 64);
+	 * 	mask |= (VBM_MASK_TYPE)1 << (i % VBM_MASK_BITS);
 	 */
 
 	/* Obvious case. Mark everything dirty. */
-	if (end - begin >= 64)
+	if (end - begin >= VBM_MASK_BITS)
 		return (VBM_DIRTY);
 
 	/* 1....0; used bits on the left. */
-	left = VBM_DIRTY << begin % 64;
+	left = VBM_DIRTY << begin % VBM_MASK_BITS;
 	/* 0....1; used bits on the right. */
-	right = VBM_DIRTY >> -end % 64;
+	right = VBM_DIRTY >> -end % VBM_MASK_BITS;
 
 	/*
 	 * Only take the intersection.  If the result of that is 0, it
-	 * means that the selection crossed a 64 bit boundary along the
+	 * means that the selection crossed a VBM_MASK_BITS bit boundary along the
 	 * way, which means we have to take the complement.
 	 */
 	mask = left & right;
@@ -231,7 +231,7 @@
 vtbuf_dirty(struct vt_buf *vb, const term_rect_t *area)
 {
 
-	VTBUF_LOCK(vb);
+//	VTBUF_LOCK(vb);
 	if (vb->vb_dirtyrect.tr_begin.tp_row > area->tr_begin.tp_row)
 		vb->vb_dirtyrect.tr_begin.tp_row = area->tr_begin.tp_row;
 	if (vb->vb_dirtyrect.tr_begin.tp_col > area->tr_begin.tp_col)
@@ -240,11 +240,11 @@
 		vb->vb_dirtyrect.tr_end.tp_row = area->tr_end.tp_row;
 	if (vb->vb_dirtyrect.tr_end.tp_col < area->tr_end.tp_col)
 		vb->vb_dirtyrect.tr_end.tp_col = area->tr_end.tp_col;
-	vb->vb_dirtymask.vbm_row |=
-	    vtbuf_dirty_axis(area->tr_begin.tp_row, area->tr_end.tp_row);
-	vb->vb_dirtymask.vbm_col |=
-	    vtbuf_dirty_axis(area->tr_begin.tp_col, area->tr_end.tp_col);
-	VTBUF_UNLOCK(vb);
+	atomic_set_int(&vb->vb_dirtymask.vbm_row,
+	    vtbuf_dirty_axis(area->tr_begin.tp_row, area->tr_end.tp_row));
+	atomic_set_int(&vb->vb_dirtymask.vbm_col,
+	    vtbuf_dirty_axis(area->tr_begin.tp_col, area->tr_end.tp_col));
+//	VTBUF_UNLOCK(vb);
 }
 
 static inline void
@@ -264,7 +264,8 @@
 	vb->vb_dirtyrect.tr_begin = vb->vb_scr_size;
 	vb->vb_dirtyrect.tr_end.tp_row = vb->vb_dirtyrect.tr_end.tp_col = 0;
 
-	vb->vb_dirtymask.vbm_row = vb->vb_dirtymask.vbm_col = 0;
+	atomic_store_rel_int(&vb->vb_dirtymask.vbm_row, 0);
+	atomic_store_rel_int(&vb->vb_dirtymask.vbm_col, 0);
 }
 
 void
@@ -271,11 +272,12 @@
 vtbuf_undirty(struct vt_buf *vb, term_rect_t *r, struct vt_bufmask *m)
 {
 
-	VTBUF_LOCK(vb);
+//	VTBUF_LOCK(vb);
 	*r = vb->vb_dirtyrect;
-	*m = vb->vb_dirtymask;
+	m->vbm_row = atomic_readandclear_int(&vb->vb_dirtymask.vbm_row);
+	m->vbm_col = atomic_readandclear_int(&vb->vb_dirtymask.vbm_col);
 	vtbuf_make_undirty(vb);
-	VTBUF_UNLOCK(vb);
+//	VTBUF_UNLOCK(vb);
 }
 
 void
@@ -370,9 +372,9 @@
 	    ("vtbuf_fill_locked end.tp_col %d must be <= screen height %d",
 		r->tr_end.tp_col, vb->vb_scr_size.tp_col));
 
-	VTBUF_LOCK(vb);
+//	VTBUF_LOCK(vb);
 	vtbuf_fill(vb, r, c);
-	VTBUF_UNLOCK(vb);
+//	VTBUF_UNLOCK(vb);
 
 	vtbuf_dirty(vb, r);
 }
@@ -514,9 +516,9 @@
 	row = vb->vb_rows[(vb->vb_curroffset + p->tp_row) %
 	    VTBUF_MAX_HEIGHT(vb)];
 	if (row[p->tp_col] != c) {
-		VTBUF_LOCK(vb);
+//		VTBUF_LOCK(vb);
 		row[p->tp_col] = c;
-		VTBUF_UNLOCK(vb);
+//		VTBUF_UNLOCK(vb);
 		vtbuf_dirty_cell(vb, p);
 	}
 }