Index: priv/main/vex_main.c
===================================================================
--- priv/main/vex_main.c	(revision 1788)
+++ priv/main/vex_main.c	(working copy)
@@ -174,7 +174,6 @@
    vexSetAllocMode ( VexAllocModeTEMP );
 }
 
-
 /* --------- Make a translation. --------- */
 
 /* Exported to library client. */
@@ -207,14 +206,13 @@
    IRSB*           irsb;
    HInstrArray*    vcode;
    HInstrArray*    rcode;
-   Int             i, j, k, out_used, guest_sizeB;
+   Int             i, j, k, out_used, guest_sizeB, spill_offsetB;
    Int             offB_TISTART, offB_TILEN;
    UChar           insn_bytes[32];
    IRType          guest_word_type;
    IRType          host_word_type;
    Bool            mode64;
 
-   guest_layout           = NULL;
    available_real_regs    = NULL;
    n_available_real_regs  = 0;
    isMove                 = NULL;
@@ -237,6 +235,7 @@
    mode64                 = False;
 
    vex_traceflags = vta->traceflags;
+   guest_layout = LibVEX_GetGuestLayout(vta->arch_guest);
 
    vassert(vex_initdone);
    vexSetAllocModeTEMP_and_clear();
@@ -337,7 +336,6 @@
          specHelper       = guest_x86_spechelper;
          guest_sizeB      = sizeof(VexGuestX86State);
          guest_word_type  = Ity_I32;
-         guest_layout     = &x86guest_layout;
          offB_TISTART     = offsetof(VexGuestX86State,guest_TISTART);
          offB_TILEN       = offsetof(VexGuestX86State,guest_TILEN);
          vassert(are_valid_hwcaps(VexArchX86, vta->archinfo_guest.hwcaps));
@@ -353,7 +351,6 @@
          specHelper       = guest_amd64_spechelper;
          guest_sizeB      = sizeof(VexGuestAMD64State);
          guest_word_type  = Ity_I64;
-         guest_layout     = &amd64guest_layout;
          offB_TISTART     = offsetof(VexGuestAMD64State,guest_TISTART);
          offB_TILEN       = offsetof(VexGuestAMD64State,guest_TILEN);
          vassert(are_valid_hwcaps(VexArchAMD64, vta->archinfo_guest.hwcaps));
@@ -369,7 +366,6 @@
          specHelper       = guest_arm_spechelper;
          guest_sizeB      = sizeof(VexGuestARMState);
          guest_word_type  = Ity_I32;
-         guest_layout     = &armGuest_layout;
          offB_TISTART     = 0; /* hack ... arm has bitrot */
          offB_TILEN       = 0; /* hack ... arm has bitrot */
          vassert(are_valid_hwcaps(VexArchARM, vta->archinfo_guest.hwcaps));
@@ -381,7 +377,6 @@
          specHelper       = guest_ppc32_spechelper;
          guest_sizeB      = sizeof(VexGuestPPC32State);
          guest_word_type  = Ity_I32;
-         guest_layout     = &ppc32Guest_layout;
          offB_TISTART     = offsetof(VexGuestPPC32State,guest_TISTART);
          offB_TILEN       = offsetof(VexGuestPPC32State,guest_TILEN);
          vassert(are_valid_hwcaps(VexArchPPC32, vta->archinfo_guest.hwcaps));
@@ -397,7 +392,6 @@
          specHelper       = guest_ppc64_spechelper;
          guest_sizeB      = sizeof(VexGuestPPC64State);
          guest_word_type  = Ity_I64;
-         guest_layout     = &ppc64Guest_layout;
          offB_TISTART     = offsetof(VexGuestPPC64State,guest_TISTART);
          offB_TILEN       = offsetof(VexGuestPPC64State,guest_TILEN);
          vassert(are_valid_hwcaps(VexArchPPC64, vta->archinfo_guest.hwcaps));
@@ -584,12 +578,17 @@
       vex_printf("\n");
    }
 
+   /* This reflects LibVEX's hard-wired knowledge of the baseBlock
+      layout: the guest state, then a variable-sized area following
+      it for shadow state, and then the spill area. */
+   spill_offsetB = guest_sizeB + vta->abiinfo_both.shadow_guest_size;
+
    /* Register allocate. */
    rcode = doRegisterAllocation ( vcode, available_real_regs,
                                   n_available_real_regs,
                                   isMove, getRegUsage, mapRegs, 
                                   genSpill, genReload, directReload, 
-                                  guest_sizeB,
+                                  spill_offsetB,
                                   ppInstr, ppReg, mode64 );
 
    vexAllocSanityCheck();
@@ -706,6 +705,21 @@
 }
 
 
+/* Return a pointer to the guest layout for an architecture. */
+VexGuestLayout *LibVEX_GetGuestLayout ( VexArch arch )
+{
+   switch (arch) {
+      case VexArchX86:     return &x86guest_layout;
+      case VexArchAMD64:   return &amd64guest_layout;
+      case VexArchARM:     return &armGuest_layout;
+      case VexArchPPC32:   return &ppc32Guest_layout;
+      case VexArchPPC64:   return &ppc64Guest_layout;
+      default:
+         vpanic("LibVEX_GetGuestLayout: unsupported insn set");
+         return NULL; /*NOTREACHED*/
+   }
+}
+
 /* Write default settings info *vai. */
 void LibVEX_default_VexArchInfo ( /*OUT*/VexArchInfo* vai )
 {
@@ -716,6 +730,7 @@
 /* Write default settings info *vbi. */
 void LibVEX_default_VexAbiInfo ( /*OUT*/VexAbiInfo* vbi )
 {
+   vbi->shadow_guest_size              = 0;
    vbi->guest_stack_redzone_size       = 0;
    vbi->guest_ppc_zap_RZ_at_blr        = False;
    vbi->guest_ppc_zap_RZ_at_bl         = NULL;
Index: priv/host-generic/reg_alloc2.c
===================================================================
--- priv/host-generic/reg_alloc2.c	(revision 1788)
+++ priv/host-generic/reg_alloc2.c	(working copy)
@@ -331,7 +331,7 @@
    HInstr* (*genSpill) ( HReg, Int, Bool ),
    HInstr* (*genReload) ( HReg, Int, Bool ),
    HInstr* (*directReload) ( HInstr*, HReg, Short ),
-   Int     guest_sizeB,
+   Int     spill_offsetB,
 
    /* For debug printing only. */
    void (*ppInstr) ( HInstr*, Bool ),
@@ -397,7 +397,7 @@
    Bool do_sanity_check;
 
    vassert(0 == LibVEX_N_SPILL_BYTES % 16);
-   vassert(0 == guest_sizeB % 8);
+   vassert(0 == spill_offsetB % 8);
 
    /* The live range numbers are signed shorts, and so limiting the
       number of insns to 10000 comfortably guards against them
@@ -848,10 +848,7 @@
 
       }
 
-      /* This reflects LibVEX's hard-wired knowledge of the baseBlock
-         layout: the guest state, then an equal sized area following
-         it for shadow state, and then the spill area. */
-      vreg_lrs[j].spill_offset = toShort(guest_sizeB * 2 + k * 8);
+      vreg_lrs[j].spill_offset = toShort(spill_offsetB + k * 8);
 
       /* if (j > max_ss_no) */
       /*    max_ss_no = j; */
Index: pub/libvex.h
===================================================================
--- pub/libvex.h	(revision 1788)
+++ pub/libvex.h	(working copy)
@@ -175,6 +175,13 @@
 
 typedef
    struct {
+      /* All platforms: how many bytes of space beyond the regular
+         guest state should be set aside for the use of
+         instrumentation code?  The sum of this and the regular guest
+         state is the offset where the spill slots start.  Should be
+         a multiple of 8. */
+      Int shadow_guest_size;
+
       /* PPC and AMD64 GUESTS only: how many bytes below the 
          stack pointer are validly addressible? */
       Int guest_stack_redzone_size;
@@ -337,9 +344,11 @@
    LibVEX defines the layout for the guest state, in the file
    pub/libvex_guest_<arch>.h.  The struct will have an 8-aligned size.
    Each translated bb is assumed to be entered with a specified
-   register pointing at such a struct.  Beyond that is a shadow
-   state area with the same size as the struct.  Beyond that is
-   a spill area that LibVEX may spill into.  It must have size
+   register pointing at such a struct.  Beyond that is a shadow state
+   area for the use of instrumentation code. Often it will have the
+   same size and layout as the regular guest state, or an integer
+   multiple, but it doesn't have to be. Beyond that is a spill area
+   that LibVEX may spill into.  It must have size
    LibVEX_N_SPILL_BYTES, and this must be a 16-aligned number.
 
    On entry, the baseblock pointer register must be 8-aligned.
@@ -347,6 +356,11 @@
 
 #define LibVEX_N_SPILL_BYTES 2048
 
+/* Fetch the guest layout for a given architecture.  It will also be
+   supplied as a parameter to the instrumentation functions, but use
+   this if you want access to it at other times. */
+extern
+VexGuestLayout *LibVEX_GetGuestLayout ( VexArch arch );
 
 /*-------------------------------------------------------*/
 /*--- Initialisation of the library                   ---*/
