Changeset be7ca34 in rtems


Ignore:
Timestamp:
Sep 25, 2008, 7:32:15 PM
Author:
Joel Sherrill <joel.sherrill@…>
Branches:
4.10, 4.11, master
Children:
d66a6cb
Parents:
fa6f92df
Message:

2008-09-25 Joel Sherrill <joel.sherrill@…>

  • Makefile.am, configure.ac, sh7032/score/cpu_asm.c, sh7045/score/cpu_asm.c, sh7750/score/cpu_asm.c: Move duplicated context switch code to score/cpu and provide an interrupt handling stub for the GDB SuperH simulator since it does not support interrupts or devices. This has been used to run tests on the simulator BSP as SH1, SH2, and SH4.
  • shgdb/score/cpu_asm.c, shgdb/score/ispshgdb.c: New files.
Location:
c/src/lib/libcpu/sh
Files:
2 added
6 edited

Legend:

Unmodified
Added
Removed
  • c/src/lib/libcpu/sh/ChangeLog

    rfa6f92df rbe7ca34  
     12008-09-25      Joel Sherrill <joel.sherrill@oarcorp.com>
     2
     3        * Makefile.am, configure.ac, sh7032/score/cpu_asm.c,
     4        sh7045/score/cpu_asm.c, sh7750/score/cpu_asm.c: Move duplicated
     5        context switch code to score/cpu and provide an interrupt handling
     6        stub for the GDB SuperH simulator since it does not support
     7        interrupts or devices. This has been used to run tests on the
     8        simulator BSP as SH1, SH2, and SH4.
     9        * shgdb/score/cpu_asm.c, shgdb/score/ispshgdb.c: New files.
     10
    1112008-09-05      Joel Sherrill <joel.sherrill@OARcorp.com>
    212
  • c/src/lib/libcpu/sh/Makefile.am

    rfa6f92df rbe7ca34  
    120120endif
    121121
     122if shgdb
     123## shgdb/score
     124noinst_PROGRAMS += shgdb/score.rel
     125shgdb_score_rel_SOURCES = shgdb/score/cpu_asm.c shgdb/score/ispshgdb.c
     126shgdb_score_rel_CPPFLAGS = $(AM_CPPFLAGS)
     127shgdb_score_rel_LDFLAGS = $(RTEMS_RELLDFLAGS)
     128endif
     129
    122130include $(srcdir)/preinstall.am
    123131include $(top_srcdir)/../../../automake/local.am
  • c/src/lib/libcpu/sh/configure.ac

    rfa6f92df rbe7ca34  
    2525AM_CONDITIONAL([sh7045],[test x"$RTEMS_CPU_MODEL" = x"sh7045"])
    2626AM_CONDITIONAL([sh7750],[test x"$RTEMS_CPU_MODEL" = x"sh7750"])
     27AM_CONDITIONAL([shgdb],[test x"$RTEMS_CPU_MODEL" = x"shgdb"])
    2728
    2829RTEMS_AMPOLISH3
  • c/src/lib/libcpu/sh/sh7032/score/cpu_asm.c

    rfa6f92df rbe7ca34  
    126126
    127127/*
    128  *  _CPU_Context_save_fp_context
    129  *
    130  *  This routine is responsible for saving the FP context
    131  *  at *fp_context_ptr.  If the point to load the FP context
    132  *  from is changed then the pointer is modified by this routine.
    133  *
    134  *  Sometimes a macro implementation of this is in cpu.h which dereferences
    135  *  the ** and a similarly named routine in this file is passed something
    136  *  like a (Context_Control_fp *).  The general rule on making this decision
    137  *  is to avoid writing assembly language.
    138  */
    139 
    140 void _CPU_Context_save_fp(
    141   Context_Control_fp **fp_context_ptr
    142 )
    143 {
    144 }
    145 
    146 /*
    147  *  _CPU_Context_restore_fp_context
    148  *
    149  *  This routine is responsible for restoring the FP context
    150  *  at *fp_context_ptr.  If the point to load the FP context
    151  *  from is changed then the pointer is modified by this routine.
    152  *
    153  *  Sometimes a macro implementation of this is in cpu.h which dereferences
    154  *  the ** and a similarly named routine in this file is passed something
    155  *  like a (Context_Control_fp *).  The general rule on making this decision
    156  *  is to avoid writing assembly language.
    157  */
    158 
    159 void _CPU_Context_restore_fp(
    160   Context_Control_fp **fp_context_ptr
    161 )
    162 {
    163 }
    164 
    165 /*  _CPU_Context_switch
    166  *
    167  *  This routine performs a normal non-FP context switch.
    168  */
    169 
    170 /*  within __CPU_Context_switch:
    171  *  _CPU_Context_switch
    172  *  _CPU_Context_restore
    173  *
    174  *  This routine is generally used only to restart self in an
    175  *  efficient manner.  It may simply be a label in _CPU_Context_switch.
    176  *
    177  * NOTE: It should be safe not to store r4, r5
    178  *
    179  * NOTE: It is doubtful if r0 is really needed to be stored
    180  *
    181  * NOTE: gbr is added, but should not be necessary, as it is
    182  *      only used globally in this port.
    183  */
    184 
    185 /*
    186  * FIXME: This is an ugly hack, but we wanted to avoid recalculating
    187  *        the offset each time Context_Control is changed
    188  */
    189 void __CPU_Context_switch(
    190   Context_Control  *run,        /* r4 */
    191   Context_Control  *heir        /* r5 */
    192 )
    193 {
    194 
    195 asm volatile(
    196         ".global __CPU_Context_switch\n"
    197 "__CPU_Context_switch:\n"
    198 
    199 "       add     %0,r4\n"
    200 
    201 "       stc.l   sr,@-r4\n"
    202 "       stc.l   gbr,@-r4\n"
    203 "       mov.l   r0,@-r4\n"
    204 "       mov.l   r1,@-r4\n"
    205 "       mov.l   r2,@-r4\n"
    206 "       mov.l   r3,@-r4\n"
    207 
    208 "       mov.l   r6,@-r4\n"
    209 "       mov.l   r7,@-r4\n"
    210 "       mov.l   r8,@-r4\n"
    211 "       mov.l   r9,@-r4\n"
    212 "       mov.l   r10,@-r4\n"
    213 "       mov.l   r11,@-r4\n"
    214 "       mov.l   r12,@-r4\n"
    215 "       mov.l   r13,@-r4\n"
    216 "       mov.l   r14,@-r4\n"
    217 "       sts.l   pr,@-r4\n"
    218 "       sts.l   mach,@-r4\n"
    219 "       sts.l   macl,@-r4\n"
    220 "       mov.l   r15,@-r4\n"
    221 
    222 "       mov     r5, r4\n"
    223   :: "i" (sizeof(Context_Control))
    224   );
    225 
    226   asm volatile(
    227         ".global __CPU_Context_restore\n"
    228 "__CPU_Context_restore:\n"
    229 "       mov.l   @r4+,r15\n"
    230 "       lds.l   @r4+,macl\n"
    231 "       lds.l   @r4+,mach\n"
    232 "       lds.l   @r4+,pr\n"
    233 "       mov.l   @r4+,r14\n"
    234 "       mov.l   @r4+,r13\n"
    235 "       mov.l   @r4+,r12\n"
    236 "       mov.l   @r4+,r11\n"
    237 "       mov.l   @r4+,r10\n"
    238 "       mov.l   @r4+,r9\n"
    239 "       mov.l   @r4+,r8\n"
    240 "       mov.l   @r4+,r7\n"
    241 "       mov.l   @r4+,r6\n"
    242 
    243 "       mov.l   @r4+,r3\n"
    244 "       mov.l   @r4+,r2\n"
    245 "       mov.l   @r4+,r1\n"
    246 "       mov.l   @r4+,r0\n"
    247 "       ldc.l   @r4+,gbr\n"
    248 "       ldc.l   @r4+,sr\n"
    249 
    250 "       rts\n"
    251 "       nop\n" );
    252 }
    253 
    254 /*
    255128 *  This routine provides the RTEMS interrupt management.
    256129 */
  • c/src/lib/libcpu/sh/sh7045/score/cpu_asm.c

    rfa6f92df rbe7ca34  
    128128
    129129/*
    130  *  _CPU_Context_save_fp_context
    131  *
    132  *  This routine is responsible for saving the FP context
    133  *  at *fp_context_ptr.  If the point to load the FP context
    134  *  from is changed then the pointer is modified by this routine.
    135  *
    136  *  Sometimes a macro implementation of this is in cpu.h which dereferences
    137  *  the ** and a similarly named routine in this file is passed something
    138  *  like a (Context_Control_fp *).  The general rule on making this decision
    139  *  is to avoid writing assembly language.
    140  */
    141 
    142 void _CPU_Context_save_fp(
    143   Context_Control_fp **fp_context_ptr
    144 )
    145 {
    146 }
    147 
    148 /*
    149  *  _CPU_Context_restore_fp_context
    150  *
    151  *  This routine is responsible for restoring the FP context
    152  *  at *fp_context_ptr.  If the point to load the FP context
    153  *  from is changed then the pointer is modified by this routine.
    154  *
    155  *  Sometimes a macro implementation of this is in cpu.h which dereferences
    156  *  the ** and a similarly named routine in this file is passed something
    157  *  like a (Context_Control_fp *).  The general rule on making this decision
    158  *  is to avoid writing assembly language.
    159  */
    160 
    161 void _CPU_Context_restore_fp(
    162   Context_Control_fp **fp_context_ptr
    163 )
    164 {
    165 }
    166 
    167 /*  _CPU_Context_switch
    168  *
    169  *  This routine performs a normal non-FP context switch.
    170  */
    171 
    172 /*  within __CPU_Context_switch:
    173  *  _CPU_Context_switch
    174  *  _CPU_Context_restore
    175  *
    176  *  This routine is generally used only to restart self in an
    177  *  efficient manner.  It may simply be a label in _CPU_Context_switch.
    178  *
    179  * NOTE: It should be safe not to store r4, r5
    180  *
    181  * NOTE: It is doubtful if r0 is really needed to be stored
    182  *
    183  * NOTE: gbr is added, but should not be necessary, as it is
    184  *      only used globally in this port.
    185  */
    186 
    187 /*
    188  * FIXME: This is an ugly hack, but we wanted to avoid recalculating
    189  *        the offset each time Context_Control is changed
    190  */
    191 void __CPU_Context_switch(
    192   Context_Control  *run,        /* r4 */
    193   Context_Control  *heir        /* r5 */
    194 )
    195 {
    196 
    197 asm volatile("\n\
    198         .global __CPU_Context_switch\n\
    199 __CPU_Context_switch:\n\
    200 \n\
    201         add     %0,r4\n\
    202   \n\
    203         stc.l   sr,@-r4\n\
    204         stc.l   gbr,@-r4\n\
    205         mov.l   r0,@-r4\n\
    206         mov.l   r1,@-r4\n\
    207         mov.l   r2,@-r4\n\
    208         mov.l   r3,@-r4\n\
    209 \n\
    210         mov.l   r6,@-r4\n\
    211         mov.l   r7,@-r4\n\
    212         mov.l   r8,@-r4\n\
    213         mov.l   r9,@-r4\n\
    214         mov.l   r10,@-r4\n\
    215         mov.l   r11,@-r4\n\
    216         mov.l   r12,@-r4\n\
    217         mov.l   r13,@-r4\n\
    218         mov.l   r14,@-r4\n\
    219         sts.l   pr,@-r4\n\
    220         sts.l   mach,@-r4\n\
    221         sts.l   macl,@-r4\n\
    222         mov.l   r15,@-r4\n\
    223 \n\
    224         mov     r5, r4"
    225   :: "i" (sizeof(Context_Control))
    226   );
    227 
    228   asm volatile("\n\
    229         .global __CPU_Context_restore\n\
    230 __CPU_Context_restore:\n\
    231         mov.l   @r4+,r15\n\
    232         lds.l   @r4+,macl\n\
    233         lds.l   @r4+,mach\n\
    234         lds.l   @r4+,pr\n\
    235         mov.l   @r4+,r14\n\
    236         mov.l   @r4+,r13\n\
    237         mov.l   @r4+,r12\n\
    238         mov.l   @r4+,r11\n\
    239         mov.l   @r4+,r10\n\
    240         mov.l   @r4+,r9\n\
    241         mov.l   @r4+,r8\n\
    242         mov.l   @r4+,r7\n\
    243         mov.l   @r4+,r6\n\
    244 \n\
    245         mov.l   @r4+,r3\n\
    246         mov.l   @r4+,r2\n\
    247         mov.l   @r4+,r1\n\
    248         mov.l   @r4+,r0\n\
    249         ldc.l   @r4+,gbr\n\
    250         ldc.l   @r4+,sr\n\
    251 \n\
    252         rts\n\
    253         nop" );
    254 }
    255 
    256 /*
    257130 *  This routine provides the RTEMS interrupt management.
    258131 */
  • c/src/lib/libcpu/sh/sh7750/score/cpu_asm.c

    rfa6f92df rbe7ca34  
    5050
    5151register unsigned long  *stack_ptr asm("r15");
    52 
    53 /*
    54  *  _CPU_Context_save_fp_context
    55  *
    56  *  This routine is responsible for saving the FP context
    57  *  at *fp_context_ptr.  If the point to load the FP context
    58  *  from is changed then the pointer is modified by this routine.
    59  *
    60  *  Sometimes a macro implementation of this is in cpu.h which dereferences
    61  *  the ** and a similarly named routine in this file is passed something
    62  *  like a (Context_Control_fp *).  The general rule on making this decision
    63  *  is to avoid writing assembly language.
    64  */
    65 
    66 void _CPU_Context_save_fp(
    67   Context_Control_fp **fp_context_ptr     /* r4 */
    68 )
    69 {
    70 #if SH_HAS_FPU
    71 
    72 asm volatile("\n\
    73     mov.l   @%0,r4    \n\
    74     add     %1,r4\n\
    75     sts.l   fpscr,@-r4\n\
    76     sts.l   fpul,@-r4\n\
    77     lds     %2,fpscr\n\
    78     fmov    dr14,@-r4\n\
    79     fmov    dr12,@-r4\n\
    80     fmov    dr10,@-r4\n\
    81     fmov    dr8,@-r4\n\
    82     fmov    dr6,@-r4\n\
    83     fmov    dr4,@-r4\n\
    84     fmov    dr2,@-r4\n\
    85     fmov    dr0,@-r4\n\
    86     "
    87 #ifdef SH4_USE_X_REGISTERS
    88     "\
    89     lds     %3,fpscr\n\
    90     fmov    xd14,@-r4\n\
    91     fmov    xd12,@-r4\n\
    92     fmov    xd10,@-r4\n\
    93     fmov    xd8,@-r4\n\
    94     fmov    xd6,@-r4\n\
    95     fmov    xd4,@-r4\n\
    96     fmov    xd2,@-r4\n\
    97     fmov    xd0,@-r4\n\
    98     "
    99 #endif
    100    "lds     %4,fpscr\n\
    101    "
    102     :
    103     : "r"(fp_context_ptr), "r"(sizeof(Context_Control_fp)),
    104       "r"(SH4_FPSCR_SZ), "r"(SH4_FPSCR_PR | SH4_FPSCR_SZ), "r"(SH4_FPSCR_PR)
    105     : "r4", "r0");
    106 
    107 #endif
    108 }
    109 
    110 /*
    111  *  _CPU_Context_restore_fp_context
    112  *
    113  *  This routine is responsible for restoring the FP context
    114  *  at *fp_context_ptr.  If the point to load the FP context
    115  *  from is changed then the pointer is modified by this routine.
    116  *
    117  *  Sometimes a macro implementation of this is in cpu.h which dereferences
    118  *  the ** and a similarly named routine in this file is passed something
    119  *  like a (Context_Control_fp *).  The general rule on making this decision
    120  *  is to avoid writing assembly language.
    121  */
    122 
    123 void _CPU_Context_restore_fp(
    124   Context_Control_fp **fp_context_ptr     /* r4 */
    125 )
    126 {
    127 #if SH_HAS_FPU
    128 
    129 asm volatile("\n\
    130     mov.l   @%0,r4    \n\
    131     "
    132 #ifdef SH4_USE_X_REGISTERS
    133     "\n\
    134     lds     %1,fpscr\n\
    135     fmov    @r4+,xd0\n\
    136     fmov    @r4+,xd2\n\
    137     fmov    @r4+,xd4\n\
    138     fmov    @r4+,xd6\n\
    139     fmov    @r4+,xd8\n\
    140     fmov    @r4+,xd10\n\
    141     fmov    @r4+,xd12\n\
    142     fmov    @r4+,xd14\n\
    143     "
    144 #endif
    145     "\n\
    146     lds     %2,fpscr\n\
    147     fmov    @r4+,dr0\n\
    148     fmov    @r4+,dr2\n\
    149     fmov    @r4+,dr4\n\
    150     fmov    @r4+,dr6\n\
    151     fmov    @r4+,dr8\n\
    152     fmov    @r4+,dr10\n\
    153     fmov    @r4+,dr12\n\
    154     fmov    @r4+,dr14\n\
    155     lds.l   @r4+,fpul\n\
    156     lds.l   @r4+,fpscr\n\
    157     " :
    158     : "r"(fp_context_ptr), "r"(SH4_FPSCR_PR | SH4_FPSCR_SZ), "r"(SH4_FPSCR_SZ)
    159     : "r4", "r0");
    160 
    161 #endif
    162 }
    163 
    164 /*  _CPU_Context_switch
    165  *
    166  *  This routine performs a normal non-FP context switch.
    167  */
    168 
    169 /*  within __CPU_Context_switch:
    170  *  _CPU_Context_switch
    171  *  _CPU_Context_restore
    172  *
    173  *  This routine is generally used only to restart self in an
    174  *  efficient manner.  It may simply be a label in _CPU_Context_switch.
    175  *
    176  * NOTE: It should be safe not to store r4, r5
    177  *
    178  * NOTE: It is doubtful if r0 is really needed to be stored
    179  *
    180  * NOTE: gbr is added, but should not be necessary, as it is
    181  *      only used globally in this port.
    182  */
    183 
    184 /*
    185  * FIXME: This is an ugly hack, but we wanted to avoid recalculating
    186  *        the offset each time Context_Control is changed
    187  */
    188 void __CPU_Context_switch(
    189   Context_Control  *run,        /* r4 */
    190   Context_Control  *heir        /* r5 */
    191 )
    192 {
    193 
    194 asm volatile("\n\
    195         .global __CPU_Context_switch\n\
    196 __CPU_Context_switch:\n\
    197 \n\
    198         add     %0,r4\n\
    199   \n\
    200         stc.l   sr,@-r4\n\
    201         stc.l   gbr,@-r4\n\
    202         mov.l   r0,@-r4\n\
    203         mov.l   r1,@-r4\n\
    204         mov.l   r2,@-r4\n\
    205         mov.l   r3,@-r4\n\
    206 \n\
    207         mov.l   r6,@-r4\n\
    208         mov.l   r7,@-r4\n\
    209         mov.l   r8,@-r4\n\
    210         mov.l   r9,@-r4\n\
    211         mov.l   r10,@-r4\n\
    212         mov.l   r11,@-r4\n\
    213         mov.l   r12,@-r4\n\
    214         mov.l   r13,@-r4\n\
    215         mov.l   r14,@-r4\n\
    216         sts.l   pr,@-r4\n\
    217         sts.l   mach,@-r4\n\
    218         sts.l   macl,@-r4\n\
    219         mov.l   r15,@-r4\n\
    220 \n\
    221         mov     r5, r4"
    222   :: "i" (sizeof(Context_Control))
    223   );
    224 
    225   asm volatile("\n\
    226         .global __CPU_Context_restore\n\
    227 __CPU_Context_restore:\n\
    228         mov.l   @r4+,r15\n\
    229         lds.l   @r4+,macl\n\
    230         lds.l   @r4+,mach\n\
    231         lds.l   @r4+,pr\n\
    232         mov.l   @r4+,r14\n\
    233         mov.l   @r4+,r13\n\
    234         mov.l   @r4+,r12\n\
    235         mov.l   @r4+,r11\n\
    236         mov.l   @r4+,r10\n\
    237         mov.l   @r4+,r9\n\
    238         mov.l   @r4+,r8\n\
    239         mov.l   @r4+,r7\n\
    240         mov.l   @r4+,r6\n\
    241 \n\
    242         mov.l   @r4+,r3\n\
    243         mov.l   @r4+,r2\n\
    244         mov.l   @r4+,r1\n\
    245         mov.l   @r4+,r0\n\
    246         ldc.l   @r4+,gbr\n\
    247         ldc.l   @r4+,sr\n\
    248 \n\
    249         rts\n\
    250         nop" );
    251 }
    25252
    25353/*
Note: See TracChangeset for help on using the changeset viewer.